static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
-static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
-static tree fold_relational_hi_lo (enum tree_code *, const tree,
- tree *, tree *);
static bool tree_expr_nonzero_p (tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
return overflow;
}
+
+/* If ARG2 divides ARG1 with zero remainder, carries out the division
+   of type CODE and returns the quotient.
+   Otherwise returns NULL_TREE.  */
+
+static tree
+div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
+{
+  unsigned HOST_WIDE_INT int1l, int2l;
+  HOST_WIDE_INT int1h, int2h;
+  unsigned HOST_WIDE_INT quol, reml;
+  HOST_WIDE_INT quoh, remh;
+  tree type = TREE_TYPE (arg1);
+  int uns = TYPE_UNSIGNED (type);
+
+  /* Split both double-word integer constants into their low and
+     high words for div_and_round_double.  */
+  int1l = TREE_INT_CST_LOW (arg1);
+  int1h = TREE_INT_CST_HIGH (arg1);
+  int2l = TREE_INT_CST_LOW (arg2);
+  int2h = TREE_INT_CST_HIGH (arg2);
+
+  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
+			&quol, &quoh, &reml, &remh);
+  /* Any nonzero remainder word means ARG2 does not evenly divide
+     ARG1, so there is no exact quotient to return.  */
+  if (remh != 0 || reml != 0)
+    return NULL_TREE;
+
+  return build_int_cst_wide (type, quol, quoh);
+}
\f
/* Return true if built-in mathematical function specified by CODE
preserves the sign of it argument, i.e. -f(x) == f(-x). */
TREE_OPERAND (t, 1)))
{
tem = negate_expr (TREE_OPERAND (t, 1));
- tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
- tem, TREE_OPERAND (t, 0)));
+ tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
+ tem, TREE_OPERAND (t, 0));
return fold_convert (type, tem);
}
if (negate_expr_p (TREE_OPERAND (t, 0)))
{
tem = negate_expr (TREE_OPERAND (t, 0));
- tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
- tem, TREE_OPERAND (t, 1)));
+ tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
+ tem, TREE_OPERAND (t, 1));
return fold_convert (type, tem);
}
}
if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
&& reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
return fold_convert (type,
- fold (build2 (MINUS_EXPR, TREE_TYPE (t),
- TREE_OPERAND (t, 1),
- TREE_OPERAND (t, 0))));
+ fold_build2 (MINUS_EXPR, TREE_TYPE (t),
+ TREE_OPERAND (t, 1),
+ TREE_OPERAND (t, 0)));
break;
case MULT_EXPR:
tem = TREE_OPERAND (t, 1);
if (negate_expr_p (tem))
return fold_convert (type,
- fold (build2 (TREE_CODE (t), TREE_TYPE (t),
- TREE_OPERAND (t, 0),
- negate_expr (tem))));
+ fold_build2 (TREE_CODE (t), TREE_TYPE (t),
+ TREE_OPERAND (t, 0),
+ negate_expr (tem)));
tem = TREE_OPERAND (t, 0);
if (negate_expr_p (tem))
return fold_convert (type,
- fold (build2 (TREE_CODE (t), TREE_TYPE (t),
- negate_expr (tem),
- TREE_OPERAND (t, 1))));
+ fold_build2 (TREE_CODE (t), TREE_TYPE (t),
+ negate_expr (tem),
+ TREE_OPERAND (t, 1)));
}
break;
? lang_hooks.types.signed_type (type)
: lang_hooks.types.unsigned_type (type);
tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
- temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
+ temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
return fold_convert (type, temp);
}
}
break;
}
- tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
+ tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
return fold_convert (type, tem);
}
\f
fold_convert (type, t2));
}
- return fold (build2 (code, type, fold_convert (type, t1),
- fold_convert (type, t2)));
+ return fold_build2 (code, type, fold_convert (type, t1),
+ fold_convert (type, t2));
}
\f
/* Combine two integer constants ARG1 and ARG2 under operation CODE
int is_sizetype
= (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
int overflow = 0;
- int no_overflow = 0;
int1l = TREE_INT_CST_LOW (arg1);
int1h = TREE_INT_CST_HIGH (arg1);
interpretation ruling is needed. */
lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
&low, &hi, !uns);
- no_overflow = 1;
break;
case RROTATE_EXPR:
case RDIV_EXPR:
{
+ tree t1, t2, real, imag;
tree magsquared
= const_binop (PLUS_EXPR,
const_binop (MULT_EXPR, r2, r2, notrunc),
const_binop (MULT_EXPR, i2, i2, notrunc),
notrunc);
- t = build_complex (type,
- const_binop
- (INTEGRAL_TYPE_P (TREE_TYPE (r1))
- ? TRUNC_DIV_EXPR : RDIV_EXPR,
- const_binop (PLUS_EXPR,
- const_binop (MULT_EXPR, r1, r2,
- notrunc),
- const_binop (MULT_EXPR, i1, i2,
- notrunc),
- notrunc),
- magsquared, notrunc),
- const_binop
- (INTEGRAL_TYPE_P (TREE_TYPE (r1))
- ? TRUNC_DIV_EXPR : RDIV_EXPR,
- const_binop (MINUS_EXPR,
- const_binop (MULT_EXPR, i1, r2,
- notrunc),
- const_binop (MULT_EXPR, r1, i2,
- notrunc),
- notrunc),
- magsquared, notrunc));
+ t1 = const_binop (PLUS_EXPR,
+ const_binop (MULT_EXPR, r1, r2, notrunc),
+ const_binop (MULT_EXPR, i1, i2, notrunc),
+ notrunc);
+ t2 = const_binop (MINUS_EXPR,
+ const_binop (MULT_EXPR, i1, r2, notrunc),
+ const_binop (MULT_EXPR, r1, i2, notrunc),
+ notrunc);
+
+ if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
+ {
+ real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
+ imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
+ }
+ else
+ {
+ real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
+ imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
+ if (!real || !imag)
+ return NULL_TREE;
+ }
+
+ t = build_complex (type, real, imag);
}
break;
if (arg0 == error_mark_node || arg1 == error_mark_node)
return error_mark_node;
- return fold (build2 (code, type, arg0, arg1));
+ return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
|| lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
TYPE_MAIN_VARIANT (orig)))
- return fold (build1 (NOP_EXPR, type, arg));
+ return fold_build1 (NOP_EXPR, type, arg);
switch (TREE_CODE (type))
{
}
if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
|| TREE_CODE (orig) == OFFSET_TYPE)
- return fold (build1 (NOP_EXPR, type, arg));
+ return fold_build1 (NOP_EXPR, type, arg);
if (TREE_CODE (orig) == COMPLEX_TYPE)
{
- tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
+ tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
return fold_convert (type, tem);
}
gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
&& tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
- return fold (build1 (NOP_EXPR, type, arg));
+ return fold_build1 (NOP_EXPR, type, arg);
case REAL_TYPE:
if (TREE_CODE (arg) == INTEGER_CST)
case INTEGER_TYPE: case CHAR_TYPE:
case BOOLEAN_TYPE: case ENUMERAL_TYPE:
case POINTER_TYPE: case REFERENCE_TYPE:
- return fold (build1 (FLOAT_EXPR, type, arg));
+ return fold_build1 (FLOAT_EXPR, type, arg);
case REAL_TYPE:
- return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
- type, arg));
+ return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
+ type, arg);
case COMPLEX_TYPE:
- tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
+ tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
return fold_convert (type, tem);
default:
{
rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
- return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
+ return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
}
arg = save_expr (arg);
- rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
- ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
+ rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
+ ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
rpart = fold_convert (TREE_TYPE (type), rpart);
ipart = fold_convert (TREE_TYPE (type), ipart);
- return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
+ return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
}
default:
gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
|| TREE_CODE (orig) == VECTOR_TYPE);
- return fold (build1 (NOP_EXPR, type, arg));
+ return fold_build1 (NOP_EXPR, type, arg);
case VOID_TYPE:
- return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
+ return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
default:
gcc_unreachable ();
}
}
\f
-/* Return an expr equal to X but certainly not valid as an lvalue. */
+/* Return false if EXPR can be assumed not to be an lvalue, true
+   otherwise.  */
-tree
-non_lvalue (tree x)
+static bool
+maybe_lvalue_p (tree x)
{
- /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
- us. */
- if (in_gimple_form)
- return x;
-
/* We only need to wrap lvalue tree codes. */
switch (TREE_CODE (x))
{
/* Assume the worst for front-end tree codes. */
if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
break;
- return x;
+ return false;
}
+
+ return true;
+}
+
+/* Return an expr equal to X but certainly not valid as an lvalue. */
+
+tree
+non_lvalue (tree x)
+{
+ /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
+ us. */
+ if (in_gimple_form)
+ return x;
+
+ if (! maybe_lvalue_p (x))
+ return x;
return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
as well: if reversing the comparison is unsafe, return ERROR_MARK. */
-static enum tree_code
+enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
if (honor_nans && flag_trapping_math)
else if (compcode == COMPCODE_FALSE)
return constant_boolean_node (false, truth_type);
else
- return fold (build2 (compcode_to_comparison (compcode),
- truth_type, ll_arg, lr_arg));
+ return fold_build2 (compcode_to_comparison (compcode),
+ truth_type, ll_arg, lr_arg);
}
/* Return nonzero if CODE is a tree code that represents a truth value. */
switch (class)
{
case tcc_unary:
- return fold (build1 (code, type,
- eval_subst (TREE_OPERAND (arg, 0),
- old0, new0, old1, new1)));
+ return fold_build1 (code, type,
+ eval_subst (TREE_OPERAND (arg, 0),
+ old0, new0, old1, new1));
case tcc_binary:
- return fold (build2 (code, type,
- eval_subst (TREE_OPERAND (arg, 0),
- old0, new0, old1, new1),
- eval_subst (TREE_OPERAND (arg, 1),
- old0, new0, old1, new1)));
+ return fold_build2 (code, type,
+ eval_subst (TREE_OPERAND (arg, 0),
+ old0, new0, old1, new1),
+ eval_subst (TREE_OPERAND (arg, 1),
+ old0, new0, old1, new1));
case tcc_expression:
switch (code)
return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
case COND_EXPR:
- return fold (build3 (code, type,
- eval_subst (TREE_OPERAND (arg, 0),
- old0, new0, old1, new1),
- eval_subst (TREE_OPERAND (arg, 1),
- old0, new0, old1, new1),
- eval_subst (TREE_OPERAND (arg, 2),
- old0, new0, old1, new1)));
+ return fold_build3 (code, type,
+ eval_subst (TREE_OPERAND (arg, 0),
+ old0, new0, old1, new1),
+ eval_subst (TREE_OPERAND (arg, 1),
+ old0, new0, old1, new1),
+ eval_subst (TREE_OPERAND (arg, 2),
+ old0, new0, old1, new1));
default:
break;
}
else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
arg1 = new1;
- return fold (build2 (code, type, arg0, arg1));
+ return fold_build2 (code, type, arg0, arg1);
}
default:
else
return 0;
- return fold (build2 (TREE_CODE (arg0), type, common,
- fold (build2 (code, type, left, right))));
+ return fold_build2 (TREE_CODE (arg0), type, common,
+ fold_build2 (code, type, left, right));
+}
+
+/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs or both MULT_EXPRs
+   (the MULT_EXPR case is detected via mul0/mul1 below), simplify the
+   binary operation CODE applied to them, or return NULL_TREE if no
+   simplification applies.  This optimization is unsafe for exact
+   floating-point semantics (it reassociates real division).  */
+static tree
+distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
+{
+  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
+  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
+
+  /* (A / C) +- (B / C) -> (A +- B) / C, and likewise
+     (A * C) +- (B * C) -> (A +- B) * C, when both sides share
+     the same second operand C.  */
+  if (mul0 == mul1
+      && operand_equal_p (TREE_OPERAND (arg0, 1),
+		          TREE_OPERAND (arg1, 1), 0))
+    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
+			fold_build2 (code, type,
+				     TREE_OPERAND (arg0, 0),
+				     TREE_OPERAND (arg1, 0)),
+			TREE_OPERAND (arg0, 1));
+
+  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2), folding the
+     constant factor at compile time.  Requires both C1 and C2 to be
+     REAL_CSTs; a multiplication operand is used as-is while a
+     division operand is first inverted.  */
+  if (operand_equal_p (TREE_OPERAND (arg0, 0),
+		       TREE_OPERAND (arg1, 0), 0)
+      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
+      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
+    {
+      REAL_VALUE_TYPE r0, r1;
+      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
+      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
+      if (!mul0)
+	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
+      if (!mul1)
+	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
+      /* Combine the two constant factors with CODE (PLUS or MINUS).  */
+      real_arithmetic (&r0, code, &r0, &r1);
+      return fold_build2 (MULT_EXPR, type,
+			  TREE_OPERAND (arg0, 0),
+			  build_real (type, r0));
+    }
+
+  return NULL_TREE;
\f
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
fold_convert (unsigned_type, rhs),
size_int (lbitsize), 0)))
{
- warning ("comparison is always %d due to width of bit-field",
+ warning (0, "comparison is always %d due to width of bit-field",
code == NE_EXPR);
return constant_boolean_node (code == NE_EXPR, compare_type);
}
size_int (lbitsize - 1), 0);
if (! integer_zerop (tem) && ! integer_all_onesp (tem))
{
- warning ("comparison is always %d due to width of bit-field",
+ warning (0, "comparison is always %d due to width of bit-field",
code == NE_EXPR);
return constant_boolean_node (code == NE_EXPR, compare_type);
}
/* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
if (and_mask != 0)
- mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
- fold_convert (unsigned_type, and_mask), mask));
+ mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
+ fold_convert (unsigned_type, and_mask), mask);
*pmask = mask;
*pand_mask = and_mask;
if (arg0 != 0 && arg1 != 0)
{
- tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
- arg0, fold_convert (TREE_TYPE (arg0), arg1)));
+ tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
+ arg0, fold_convert (TREE_TYPE (arg0), arg1));
STRIP_NOPS (tem);
return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
}
: TYPE_MAX_VALUE (arg0_type);
if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
- high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
- fold_convert (arg0_type,
- high_positive),
- fold_convert (arg0_type,
- integer_one_node)));
+ high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
+ fold_convert (arg0_type,
+ high_positive),
+ fold_convert (arg0_type,
+ integer_one_node));
/* If the low bound is specified, "and" the range with the
range for which the original unsigned value will be
return fold_convert (type, integer_one_node);
if (low == 0)
- return fold (build2 (LE_EXPR, type, exp, high));
+ return fold_build2 (LE_EXPR, type, exp, high);
if (high == 0)
- return fold (build2 (GE_EXPR, type, exp, low));
+ return fold_build2 (GE_EXPR, type, exp, low);
if (operand_equal_p (low, high, 0))
- return fold (build2 (EQ_EXPR, type, exp, low));
+ return fold_build2 (EQ_EXPR, type, exp, low);
if (integer_zerop (low))
{
etype = lang_hooks.types.signed_type (etype);
exp = fold_convert (etype, exp);
}
- return fold (build2 (GT_EXPR, type, exp,
- fold_convert (etype, integer_zero_node)));
+ return fold_build2 (GT_EXPR, type, exp,
+ fold_convert (etype, integer_zero_node));
}
}
if (value != 0 && ! TREE_OVERFLOW (value))
return build_range_check (type,
- fold (build2 (MINUS_EXPR, etype, exp, low)),
+ fold_build2 (MINUS_EXPR, etype, exp, low),
1, fold_convert (etype, integer_zero_node),
value);
if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
? real_zerop (arg01)
: integer_zerop (arg01))
- && TREE_CODE (arg2) == NEGATE_EXPR
- && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
+ && ((TREE_CODE (arg2) == NEGATE_EXPR
+ && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
+ /* In the case that A is of the form X-Y, '-A' (arg2) may
+ have already been folded to Y-X, check for that. */
+ || (TREE_CODE (arg1) == MINUS_EXPR
+ && TREE_CODE (arg2) == MINUS_EXPR
+ && operand_equal_p (TREE_OPERAND (arg1, 0),
+ TREE_OPERAND (arg2, 1), 0)
+ && operand_equal_p (TREE_OPERAND (arg1, 1),
+ TREE_OPERAND (arg2, 0), 0))))
switch (comp_code)
{
case EQ_EXPR:
if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
arg1 = fold_convert (lang_hooks.types.signed_type
(TREE_TYPE (arg1)), arg1);
- tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
+ tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
return pedantic_non_lvalue (fold_convert (type, tem));
case UNLE_EXPR:
case UNLT_EXPR:
if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
arg1 = fold_convert (lang_hooks.types.signed_type
(TREE_TYPE (arg1)), arg1);
- tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
+ tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
return negate_expr (fold_convert (type, tem));
default:
gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
a number and A is not. The conditions in the original
expressions will be false, so all four give B. The min()
and max() versions would give a NaN instead. */
- if (operand_equal_for_comparison_p (arg01, arg2, arg00))
+ if (operand_equal_for_comparison_p (arg01, arg2, arg00)
+ /* Avoid these transformations if the COND_EXPR may be used
+ as an lvalue in the C++ front-end. PR c++/19199. */
+ && (in_gimple_form
+ || strcmp (lang_hooks.name, "GNU C++") != 0
+ || ! maybe_lvalue_p (arg1)
+ || ! maybe_lvalue_p (arg2)))
{
tree comp_op0 = arg00;
tree comp_op1 = arg01;
comp_op0 = fold_convert (comp_type, comp_op0);
comp_op1 = fold_convert (comp_type, comp_op1);
tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
- ? fold (build2 (MIN_EXPR, comp_type, comp_op0, comp_op1))
- : fold (build2 (MIN_EXPR, comp_type, comp_op1, comp_op0));
+ ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
+ : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
return pedantic_non_lvalue (fold_convert (type, tem));
}
break;
comp_op0 = fold_convert (comp_type, comp_op0);
comp_op1 = fold_convert (comp_type, comp_op1);
tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
- ? fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1))
- : fold (build2 (MAX_EXPR, comp_type, comp_op1, comp_op0));
+ ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
+ : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
return pedantic_non_lvalue (fold_convert (type, tem));
}
break;
case EQ_EXPR:
/* We can replace A with C1 in this case. */
arg1 = fold_convert (type, arg01);
- return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
+ return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
case LT_EXPR:
/* If C1 is C2 + 1, this is min(A, C2). */
const_binop (PLUS_EXPR, arg2,
integer_one_node, 0),
OEP_ONLY_CONST))
- return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
- type, arg1, arg2)));
+ return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
+ type, arg1, arg2));
break;
case LE_EXPR:
const_binop (MINUS_EXPR, arg2,
integer_one_node, 0),
OEP_ONLY_CONST))
- return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
- type, arg1, arg2)));
+ return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
+ type, arg1, arg2));
break;
case GT_EXPR:
const_binop (MINUS_EXPR, arg2,
integer_one_node, 0),
OEP_ONLY_CONST))
- return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
- type, arg1, arg2)));
+ return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
+ type, arg1, arg2));
break;
case GE_EXPR:
const_binop (PLUS_EXPR, arg2,
integer_one_node, 0),
OEP_ONLY_CONST))
- return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
- type, arg1, arg2)));
+ return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
+ type, arg1, arg2));
break;
case NE_EXPR:
break;
l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
- fold (build1 (BIT_NOT_EXPR,
- lntype, ll_mask)),
+ fold_build1 (BIT_NOT_EXPR,
+ lntype, ll_mask),
0)))
{
- warning ("comparison is always %d", wanted_code == NE_EXPR);
+ warning (0, "comparison is always %d", wanted_code == NE_EXPR);
return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
}
r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
- fold (build1 (BIT_NOT_EXPR,
- lntype, rl_mask)),
+ fold_build1 (BIT_NOT_EXPR,
+ lntype, rl_mask),
0)))
{
- warning ("comparison is always %d", wanted_code == NE_EXPR);
+ warning (0, "comparison is always %d", wanted_code == NE_EXPR);
return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
}
{
if (wanted_code == NE_EXPR)
{
- warning ("%<or%> of unmatched not-equal tests is always 1");
+ warning (0, "%<or%> of unmatched not-equal tests is always 1");
return constant_boolean_node (true, truth_type);
}
else
{
- warning ("%<and%> of mutually exclusive equal-tests is always 0");
+ warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
return constant_boolean_node (false, truth_type);
}
}
case GE_EXPR:
return
- fold (build2 (TRUTH_ORIF_EXPR, type,
- optimize_minmax_comparison
- (EQ_EXPR, type, arg0, comp_const),
- optimize_minmax_comparison
- (GT_EXPR, type, arg0, comp_const)));
+ fold_build2 (TRUTH_ORIF_EXPR, type,
+ optimize_minmax_comparison
+ (EQ_EXPR, type, arg0, comp_const),
+ optimize_minmax_comparison
+ (GT_EXPR, type, arg0, comp_const));
case EQ_EXPR:
if (op_code == MAX_EXPR && consts_equal)
/* MAX (X, 0) == 0 -> X <= 0 */
- return fold (build2 (LE_EXPR, type, inner, comp_const));
+ return fold_build2 (LE_EXPR, type, inner, comp_const);
else if (op_code == MAX_EXPR && consts_lt)
/* MAX (X, 0) == 5 -> X == 5 */
- return fold (build2 (EQ_EXPR, type, inner, comp_const));
+ return fold_build2 (EQ_EXPR, type, inner, comp_const);
else if (op_code == MAX_EXPR)
/* MAX (X, 0) == -1 -> false */
else if (consts_equal)
/* MIN (X, 0) == 0 -> X >= 0 */
- return fold (build2 (GE_EXPR, type, inner, comp_const));
+ return fold_build2 (GE_EXPR, type, inner, comp_const);
else if (consts_lt)
/* MIN (X, 0) == 5 -> false */
else
/* MIN (X, 0) == -1 -> X == -1 */
- return fold (build2 (EQ_EXPR, type, inner, comp_const));
+ return fold_build2 (EQ_EXPR, type, inner, comp_const);
case GT_EXPR:
if (op_code == MAX_EXPR && (consts_equal || consts_lt))
/* MAX (X, 0) > 0 -> X > 0
MAX (X, 0) > 5 -> X > 5 */
- return fold (build2 (GT_EXPR, type, inner, comp_const));
+ return fold_build2 (GT_EXPR, type, inner, comp_const);
else if (op_code == MAX_EXPR)
/* MAX (X, 0) > -1 -> true */
else
/* MIN (X, 0) > -1 -> X > -1 */
- return fold (build2 (GT_EXPR, type, inner, comp_const));
+ return fold_build2 (GT_EXPR, type, inner, comp_const);
default:
return NULL_TREE;
tree cstype = (*lang_hooks.types.signed_type) (ctype);
if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
{
- t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
+ t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
return fold_convert (ctype, t1);
}
break;
/* FALLTHROUGH */
case NEGATE_EXPR:
if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
- return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
+ return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
break;
case MIN_EXPR: case MAX_EXPR:
if (tree_int_cst_sgn (c) < 0)
tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
- return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
- fold_convert (ctype, t2)));
+ return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
+ fold_convert (ctype, t2));
}
break;
are divisible by c. */
|| (multiple_of_p (ctype, op0, c)
&& multiple_of_p (ctype, op1, c))))
- return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
- fold_convert (ctype, t2)));
+ return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
+ fold_convert (ctype, t2));
/* If this was a subtraction, negate OP1 and set it to be an addition.
This simplifies the logic below. */
/* If we were able to eliminate our operation from the first side,
apply our operation to the second side and reform the PLUS. */
if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
- return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
+ return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
/* The last case is if we are a multiply. In that case, we can
apply the distributive law to commute the multiply and addition
if the multiplication of the constants doesn't overflow. */
if (code == MULT_EXPR)
- return fold (build2 (tcode, ctype,
- fold (build2 (code, ctype,
- fold_convert (ctype, op0),
- fold_convert (ctype, c))),
- op1));
+ return fold_build2 (tcode, ctype,
+ fold_build2 (code, ctype,
+ fold_convert (ctype, op0),
+ fold_convert (ctype, c)),
+ op1);
break;
do something only if the second operand is a constant. */
if (same_p
&& (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
- return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
- fold_convert (ctype, op1)));
+ return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
+ fold_convert (ctype, op1));
else if (tcode == MULT_EXPR && code == MULT_EXPR
&& (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
- return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
- fold_convert (ctype, t1)));
+ return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
+ fold_convert (ctype, t1));
else if (TREE_CODE (op1) != INTEGER_CST)
return 0;
&& 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
fold_convert (ctype, c), 0))
&& ! TREE_OVERFLOW (t1))
- return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
+ return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
/* If these operations "cancel" each other, we have the main
optimizations of this pass, which occur when either constant is a
&& code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
{
if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
- return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
- fold_convert (ctype,
- const_binop (TRUNC_DIV_EXPR,
- op1, c, 0))));
+ return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
+ fold_convert (ctype,
+ const_binop (TRUNC_DIV_EXPR,
+ op1, c, 0)));
else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
- return fold (build2 (code, ctype, fold_convert (ctype, op0),
- fold_convert (ctype,
- const_binop (TRUNC_DIV_EXPR,
- c, op1, 0))));
+ return fold_build2 (code, ctype, fold_convert (ctype, op0),
+ fold_convert (ctype,
+ const_binop (TRUNC_DIV_EXPR,
+ c, op1, 0)));
}
break;
/* Return true if expr looks like an ARRAY_REF and set base and
offset to the appropriate trees. If there is no offset,
- offset is set to NULL_TREE. */
+ offset is set to NULL_TREE. Base will be canonicalized to
+ something you can get the element type from using
+ TREE_TYPE (TREE_TYPE (base)). */
static bool
extract_array_ref (tree expr, tree *base, tree *offset)
{
- /* We have to be careful with stripping nops as with the
- base type the meaning of the offset can change. */
- tree inner_expr = expr;
- STRIP_NOPS (inner_expr);
/* One canonical form is a PLUS_EXPR with the first
argument being an ADDR_EXPR with a possible NOP_EXPR
attached. */
if (TREE_CODE (expr) == PLUS_EXPR)
{
tree op0 = TREE_OPERAND (expr, 0);
+ tree inner_base, dummy1;
+ /* Strip NOP_EXPRs here because the C frontends and/or
+ folders present us (int *)&x.a + 4B possibly. */
STRIP_NOPS (op0);
- if (TREE_CODE (op0) == ADDR_EXPR)
+ if (extract_array_ref (op0, &inner_base, &dummy1))
{
- *base = TREE_OPERAND (expr, 0);
- *offset = TREE_OPERAND (expr, 1);
+ *base = inner_base;
+ if (dummy1 == NULL_TREE)
+ *offset = TREE_OPERAND (expr, 1);
+ else
+ *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
+ dummy1, TREE_OPERAND (expr, 1));
return true;
}
}
offset. For other arguments to the ADDR_EXPR we assume
zero offset and as such do not care about the ADDR_EXPR
type and strip possible nops from it. */
- else if (TREE_CODE (inner_expr) == ADDR_EXPR)
+ else if (TREE_CODE (expr) == ADDR_EXPR)
{
- tree op0 = TREE_OPERAND (inner_expr, 0);
+ tree op0 = TREE_OPERAND (expr, 0);
if (TREE_CODE (op0) == ARRAY_REF)
{
- *base = build_fold_addr_expr (TREE_OPERAND (op0, 0));
+ *base = TREE_OPERAND (op0, 0);
*offset = TREE_OPERAND (op0, 1);
}
else
{
- *base = inner_expr;
+ /* Handle array-to-pointer decay as &a. */
+ if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
+ *base = TREE_OPERAND (expr, 0);
+ else
+ *base = expr;
*offset = NULL_TREE;
}
return true;
}
+ /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
+ else if (SSA_VAR_P (expr)
+ && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
+ {
+ *base = expr;
+ *offset = NULL_TREE;
+ return true;
+ }
return false;
}
if (lhs == 0)
{
true_value = fold_convert (cond_type, true_value);
- lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
- : build2 (code, type, arg, true_value));
+ if (cond_first_p)
+ lhs = fold_build2 (code, type, true_value, arg);
+ else
+ lhs = fold_build2 (code, type, arg, true_value);
}
if (rhs == 0)
{
false_value = fold_convert (cond_type, false_value);
- rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
- : build2 (code, type, arg, false_value));
+ if (cond_first_p)
+ rhs = fold_build2 (code, type, false_value, arg);
+ else
+ rhs = fold_build2 (code, type, arg, false_value);
}
- test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
+ test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
return fold_convert (type, test);
}
return omit_one_operand (type, integer_one_node, arg);
/* sqrt(x) > y is the same as x >= 0, if y is negative. */
- return fold (build2 (GE_EXPR, type, arg,
- build_real (TREE_TYPE (arg), dconst0)));
+ return fold_build2 (GE_EXPR, type, arg,
+ build_real (TREE_TYPE (arg), dconst0));
}
else if (code == GT_EXPR || code == GE_EXPR)
{
{
/* sqrt(x) > y is x == +Inf, when y is very large. */
if (HONOR_INFINITIES (mode))
- return fold (build2 (EQ_EXPR, type, arg,
- build_real (TREE_TYPE (arg), c2)));
+ return fold_build2 (EQ_EXPR, type, arg,
+ build_real (TREE_TYPE (arg), c2));
/* sqrt(x) > y is always false, when y is very large
and we don't care about infinities. */
}
/* sqrt(x) > c is the same as x > c*c. */
- return fold (build2 (code, type, arg,
- build_real (TREE_TYPE (arg), c2)));
+ return fold_build2 (code, type, arg,
+ build_real (TREE_TYPE (arg), c2));
}
else if (code == LT_EXPR || code == LE_EXPR)
{
/* sqrt(x) < y is x != +Inf when y is very large and we
don't care about NaNs. */
if (! HONOR_NANS (mode))
- return fold (build2 (NE_EXPR, type, arg,
- build_real (TREE_TYPE (arg), c2)));
+ return fold_build2 (NE_EXPR, type, arg,
+ build_real (TREE_TYPE (arg), c2));
/* sqrt(x) < y is x >= 0 when y is very large and we
don't care about Infinities. */
if (! HONOR_INFINITIES (mode))
- return fold (build2 (GE_EXPR, type, arg,
- build_real (TREE_TYPE (arg), dconst0)));
+ return fold_build2 (GE_EXPR, type, arg,
+ build_real (TREE_TYPE (arg), dconst0));
/* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
if (lang_hooks.decls.global_bindings_p () != 0
return NULL_TREE;
arg = save_expr (arg);
- return fold (build2 (TRUTH_ANDIF_EXPR, type,
- fold (build2 (GE_EXPR, type, arg,
- build_real (TREE_TYPE (arg),
- dconst0))),
- fold (build2 (NE_EXPR, type, arg,
- build_real (TREE_TYPE (arg),
- c2)))));
+ return fold_build2 (TRUTH_ANDIF_EXPR, type,
+ fold_build2 (GE_EXPR, type, arg,
+ build_real (TREE_TYPE (arg),
+ dconst0)),
+ fold_build2 (NE_EXPR, type, arg,
+ build_real (TREE_TYPE (arg),
+ c2)));
}
/* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
if (! HONOR_NANS (mode))
- return fold (build2 (code, type, arg,
- build_real (TREE_TYPE (arg), c2)));
+ return fold_build2 (code, type, arg,
+ build_real (TREE_TYPE (arg), c2));
/* sqrt(x) < c is the same as x >= 0 && x < c*c. */
if (lang_hooks.decls.global_bindings_p () == 0
&& ! CONTAINS_PLACEHOLDER_P (arg))
{
arg = save_expr (arg);
- return fold (build2 (TRUTH_ANDIF_EXPR, type,
- fold (build2 (GE_EXPR, type, arg,
- build_real (TREE_TYPE (arg),
- dconst0))),
- fold (build2 (code, type, arg,
- build_real (TREE_TYPE (arg),
- c2)))));
+ return fold_build2 (TRUTH_ANDIF_EXPR, type,
+ fold_build2 (GE_EXPR, type, arg,
+ build_real (TREE_TYPE (arg),
+ dconst0)),
+ fold_build2 (code, type, arg,
+ build_real (TREE_TYPE (arg),
+ c2)));
}
}
}
&& ! CONTAINS_PLACEHOLDER_P (arg0))
{
arg0 = save_expr (arg0);
- return fold (build2 (EQ_EXPR, type, arg0, arg0));
+ return fold_build2 (EQ_EXPR, type, arg0, arg0);
}
break;
case GE_EXPR:
/* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
real_maxval (&max, neg, mode);
- return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
- arg0, build_real (TREE_TYPE (arg0), max)));
+ return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
+ arg0, build_real (TREE_TYPE (arg0), max));
case LT_EXPR:
/* x < +Inf is always equal to x <= DBL_MAX. */
real_maxval (&max, neg, mode);
- return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
- arg0, build_real (TREE_TYPE (arg0), max)));
+ return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
+ arg0, build_real (TREE_TYPE (arg0), max));
case NE_EXPR:
/* x != +Inf is always equal to !(x > DBL_MAX). */
real_maxval (&max, neg, mode);
if (! HONOR_NANS (mode))
- return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
- arg0, build_real (TREE_TYPE (arg0), max)));
+ return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
+ arg0, build_real (TREE_TYPE (arg0), max));
/* The transformation below creates non-gimple code and thus is
not appropriate if we are in gimple form. */
if (in_gimple_form)
return NULL_TREE;
- temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
- arg0, build_real (TREE_TYPE (arg0), max)));
- return fold (build1 (TRUTH_NOT_EXPR, type, temp));
+ temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
+ arg0, build_real (TREE_TYPE (arg0), max));
+ return fold_build1 (TRUTH_NOT_EXPR, type, temp);
default:
break;
if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
return omit_one_operand (type, integer_zero_node, arg00);
if (TREE_OVERFLOW (hi))
- return fold (build2 (GE_EXPR, type, arg00, lo));
+ return fold_build2 (GE_EXPR, type, arg00, lo);
if (TREE_OVERFLOW (lo))
- return fold (build2 (LE_EXPR, type, arg00, hi));
+ return fold_build2 (LE_EXPR, type, arg00, hi);
return build_range_check (type, arg00, 1, lo, hi);
case NE_EXPR:
if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
return omit_one_operand (type, integer_one_node, arg00);
if (TREE_OVERFLOW (hi))
- return fold (build2 (LT_EXPR, type, arg00, lo));
+ return fold_build2 (LT_EXPR, type, arg00, lo);
if (TREE_OVERFLOW (lo))
- return fold (build2 (GT_EXPR, type, arg00, hi));
+ return fold_build2 (GT_EXPR, type, arg00, hi);
return build_range_check (type, arg00, 0, lo, hi);
case LT_EXPR:
if (TREE_OVERFLOW (lo))
return omit_one_operand (type, integer_zero_node, arg00);
- return fold (build2 (LT_EXPR, type, arg00, lo));
+ return fold_build2 (LT_EXPR, type, arg00, lo);
case LE_EXPR:
if (TREE_OVERFLOW (hi))
return omit_one_operand (type, integer_one_node, arg00);
- return fold (build2 (LE_EXPR, type, arg00, hi));
+ return fold_build2 (LE_EXPR, type, arg00, hi);
case GT_EXPR:
if (TREE_OVERFLOW (hi))
return omit_one_operand (type, integer_zero_node, arg00);
- return fold (build2 (GT_EXPR, type, arg00, hi));
+ return fold_build2 (GT_EXPR, type, arg00, hi);
case GE_EXPR:
if (TREE_OVERFLOW (lo))
return omit_one_operand (type, integer_one_node, arg00);
- return fold (build2 (GE_EXPR, type, arg00, lo));
+ return fold_build2 (GE_EXPR, type, arg00, lo);
default:
break;
/* If CODE with arguments ARG0 and ARG1 represents a single bit
+ equality/inequality test, then return a simplified form of the test
+ using a sign test. Otherwise return NULL. TYPE is the desired
+ result type. */
+
+static tree
+fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
+ tree result_type)
+{
+ /* If this is testing a single bit, we can optimize the test. */
+ if ((code == NE_EXPR || code == EQ_EXPR)
+ && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
+ && integer_pow2p (TREE_OPERAND (arg0, 1)))
+ {
+ /* If we have (A & C) != 0 where C is the sign bit of A, convert
+ this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
+ tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
+
+ if (arg00 != NULL_TREE
+ /* This is only a win if casting to a signed type is cheap,
+ i.e. when arg00's type is not a partial mode. */
+ && TYPE_PRECISION (TREE_TYPE (arg00))
+ == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
+ {
+ tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
+ return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
+ result_type, fold_convert (stype, arg00),
+ fold_convert (stype, integer_zero_node));
+ }
+ }
+
+ return NULL_TREE;
+}
+
+/* If CODE with arguments ARG0 and ARG1 represents a single bit
equality/inequality test, then return a simplified form of
the test using shifts and logical operations. Otherwise return
NULL. TYPE is the desired result type. */
enum machine_mode operand_mode = TYPE_MODE (type);
int ops_unsigned;
tree signed_type, unsigned_type, intermediate_type;
- tree arg00;
+ tree tem;
- /* If we have (A & C) != 0 where C is the sign bit of A, convert
- this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
- arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
- if (arg00 != NULL_TREE
- /* This is only a win if casting to a signed type is cheap,
- i.e. when arg00's type is not a partial mode. */
- && TYPE_PRECISION (TREE_TYPE (arg00))
- == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
- {
- tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
- return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
- result_type, fold_convert (stype, arg00),
- fold_convert (stype, integer_zero_node)));
- }
+ /* First, see if we can fold the single bit test into a sign-bit
+ test. */
+ tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
+ result_type);
+ if (tem)
+ return tem;
/* Otherwise we have (A & C) != 0 where C is a single bit,
convert that into ((A >> C2) & 1). Where C2 = log2(C).
inner, size_int (bitnum));
if (code == EQ_EXPR)
- inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
- inner, integer_one_node));
+ inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
+ inner, integer_one_node);
/* Put the AND last so it can combine with more things. */
inner = build2 (BIT_AND_EXPR, intermediate_type,
return NULL_TREE;
shorter_type = TREE_TYPE (arg0_unw);
+#ifdef HAVE_canonicalize_funcptr_for_compare
+ /* Disable this optimization if we're casting a function pointer
+ type on targets that require function pointer canonicalization. */
+ if (HAVE_canonicalize_funcptr_for_compare
+ && TREE_CODE (shorter_type) == POINTER_TYPE
+ && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
+ return NULL_TREE;
+#endif
+
if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
return NULL_TREE;
|| TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
&& (TREE_TYPE (arg1_unw) == shorter_type
|| (TREE_CODE (arg1_unw) == INTEGER_CST
- && TREE_CODE (shorter_type) == INTEGER_TYPE
+ && (TREE_CODE (shorter_type) == INTEGER_TYPE
+ || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
&& int_fits_type_p (arg1_unw, shorter_type))))
- return fold (build (code, type, arg0_unw,
- fold_convert (shorter_type, arg1_unw)));
+ return fold_build2 (code, type, arg0_unw,
+ fold_convert (shorter_type, arg1_unw));
if (TREE_CODE (arg1_unw) != INTEGER_CST)
return NULL_TREE;
tree arg0_inner, tmp;
tree inner_type, outer_type;
- if (TREE_CODE (arg0) != NOP_EXPR)
+ if (TREE_CODE (arg0) != NOP_EXPR
+ && TREE_CODE (arg0) != CONVERT_EXPR)
return NULL_TREE;
outer_type = TREE_TYPE (arg0);
arg0_inner = TREE_OPERAND (arg0, 0);
inner_type = TREE_TYPE (arg0_inner);
+#ifdef HAVE_canonicalize_funcptr_for_compare
+ /* Disable this optimization if we're casting a function pointer
+ type on targets that require function pointer canonicalization. */
+ if (HAVE_canonicalize_funcptr_for_compare
+ && TREE_CODE (inner_type) == POINTER_TYPE
+ && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
+ return NULL_TREE;
+#endif
+
if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
return NULL_TREE;
if (TREE_CODE (arg1) != INTEGER_CST
- && !(TREE_CODE (arg1) == NOP_EXPR
+ && !((TREE_CODE (arg1) == NOP_EXPR
+ || TREE_CODE (arg1) == CONVERT_EXPR)
&& TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
return NULL_TREE;
else
arg1 = fold_convert (inner_type, arg1);
- return fold (build (code, type, arg0_inner, arg1));
+ return fold_build2 (code, type, arg0_inner, arg1);
}
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
- step of the array. ADDR is the address. MULT is the multiplicative expression.
+ step of the array. Reconstructs s and delta in the case of s * delta
+ being an integer constant (and thus already folded).
+ ADDR is the address. OP1 is the multiplied operand.
If the function succeeds, the new address expression is returned. Otherwise
NULL_TREE is returned. */
static tree
-try_move_mult_to_index (enum tree_code code, tree addr, tree mult)
+try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
{
tree s, delta, step;
- tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
tree ref = TREE_OPERAND (addr, 0), pref;
tree ret, pos;
tree itype;
- STRIP_NOPS (arg0);
- STRIP_NOPS (arg1);
-
- if (TREE_CODE (arg0) == INTEGER_CST)
+ /* Canonicalize op1 into a possibly non-constant delta
+ and an INTEGER_CST s. */
+ if (TREE_CODE (op1) == MULT_EXPR)
{
- s = arg0;
- delta = arg1;
+ tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
+
+ STRIP_NOPS (arg0);
+ STRIP_NOPS (arg1);
+
+ if (TREE_CODE (arg0) == INTEGER_CST)
+ {
+ s = arg0;
+ delta = arg1;
+ }
+ else if (TREE_CODE (arg1) == INTEGER_CST)
+ {
+ s = arg1;
+ delta = arg0;
+ }
+ else
+ return NULL_TREE;
}
- else if (TREE_CODE (arg1) == INTEGER_CST)
+ else if (TREE_CODE (op1) == INTEGER_CST)
{
- s = arg1;
- delta = arg0;
+ delta = op1;
+ s = NULL_TREE;
}
else
- return NULL_TREE;
+ {
+ /* Treat the operand as if it were delta * 1. */
+ delta = op1;
+ s = integer_one_node;
+ }
for (;; ref = TREE_OPERAND (ref, 0))
{
if (TREE_CODE (ref) == ARRAY_REF)
{
- step = array_ref_element_size (ref);
-
- if (TREE_CODE (step) != INTEGER_CST)
+ itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
+ if (! itype)
continue;
- itype = TREE_TYPE (step);
-
- /* If the type sizes do not match, we might run into problems
- when one of them would overflow. */
- if (TYPE_PRECISION (itype) != TYPE_PRECISION (TREE_TYPE (s)))
+ step = array_ref_element_size (ref);
+ if (TREE_CODE (step) != INTEGER_CST)
continue;
- if (!operand_equal_p (step, fold_convert (itype, s), 0))
- continue;
+ if (s)
+ {
+ if (! tree_int_cst_equal (step, s))
+ continue;
+ }
+ else
+ {
+ /* Check whether delta is a multiple of step. */
+ tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
+ if (! tmp)
+ continue;
+ delta = tmp;
+ }
- delta = fold_convert (itype, delta);
break;
}
pos = TREE_OPERAND (pos, 0);
}
- TREE_OPERAND (pos, 1) = fold (build2 (code, itype,
- TREE_OPERAND (pos, 1),
- delta));
+ TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
+ fold_convert (itype,
+ TREE_OPERAND (pos, 1)),
+ fold_convert (itype, delta));
return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
if (TREE_TYPE (a1) != typea)
return NULL_TREE;
- diff = fold (build2 (MINUS_EXPR, typea, a1, a));
+ diff = fold_build2 (MINUS_EXPR, typea, a1, a);
if (!integer_onep (diff))
return NULL_TREE;
- return fold (build2 (GE_EXPR, type, a, y));
+ return fold_build2 (GE_EXPR, type, a, y);
}
/* Fold complex addition when both components are accessible by parts.
inner_type = TREE_TYPE (type);
- rr = fold (build2 (code, inner_type, ar, br));
- ri = fold (build2 (code, inner_type, ai, bi));
+ rr = fold_build2 (code, inner_type, ar, br);
+ ri = fold_build2 (code, inner_type, ai, bi);
- return fold (build2 (COMPLEX_EXPR, type, rr, ri));
+ return fold_build2 (COMPLEX_EXPR, type, rr, ri);
}
/* Perform some simplifications of complex multiplication when one or more
}
else if (ai0 && bi0)
{
- rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
+ rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
ri = zero;
}
else if (ai0 && br0)
{
rr = zero;
- ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
+ ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
}
else if (ar0 && bi0)
{
rr = zero;
- ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
+ ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
}
else if (ar0 && br0)
{
- rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
- rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
+ rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
+ rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
ri = zero;
}
else if (bi0)
{
- rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
- ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
+ rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
+ ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
}
else if (ai0)
{
- rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
- ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
+ rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
+ ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
}
else if (br0)
{
- rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
- rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
- ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
+ rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
+ rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
+ ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
}
else if (ar0)
{
- rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
- rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
- ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
+ rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
+ rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
+ ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
}
else
return NULL;
- return fold (build2 (COMPLEX_EXPR, type, rr, ri));
+ return fold_build2 (COMPLEX_EXPR, type, rr, ri);
}
static tree
if (ai0 && bi0)
{
- rr = fold (build2 (code, inner_type, ar, br));
+ rr = fold_build2 (code, inner_type, ar, br);
ri = zero;
}
else if (ai0 && br0)
{
rr = zero;
- ri = fold (build2 (code, inner_type, ar, bi));
- ri = fold (build1 (NEGATE_EXPR, inner_type, ri));
+ ri = fold_build2 (code, inner_type, ar, bi);
+ ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
}
else if (ar0 && bi0)
{
rr = zero;
- ri = fold (build2 (code, inner_type, ai, br));
+ ri = fold_build2 (code, inner_type, ai, br);
}
else if (ar0 && br0)
{
- rr = fold (build2 (code, inner_type, ai, bi));
+ rr = fold_build2 (code, inner_type, ai, bi);
ri = zero;
}
else if (bi0)
{
- rr = fold (build2 (code, inner_type, ar, br));
- ri = fold (build2 (code, inner_type, ai, br));
+ rr = fold_build2 (code, inner_type, ar, br);
+ ri = fold_build2 (code, inner_type, ai, br);
}
else if (br0)
{
- rr = fold (build2 (code, inner_type, ai, bi));
- ri = fold (build2 (code, inner_type, ar, bi));
- ri = fold (build1 (NEGATE_EXPR, inner_type, ri));
+ rr = fold_build2 (code, inner_type, ai, bi);
+ ri = fold_build2 (code, inner_type, ar, bi);
+ ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
}
else
return NULL;
- return fold (build2 (COMPLEX_EXPR, type, rr, ri));
+ return fold_build2 (COMPLEX_EXPR, type, rr, ri);
}
static tree
return fold_complex_div_parts (type, ar, ai, br, bi, code);
}
-/* Fold a unary expression EXPR. Return the folded expression if
- folding is successful. Otherwise, return the original
- expression. */
+/* Fold a unary expression of code CODE and type TYPE with operand
+ OP0. Return the folded expression if folding is successful.
+ Otherwise, return NULL_TREE. */
-static tree
-fold_unary (tree expr)
+tree
+fold_unary (enum tree_code code, tree type, tree op0)
{
- const tree t = expr;
- const tree type = TREE_TYPE (expr);
tree tem;
- tree op0, arg0;
- enum tree_code code = TREE_CODE (t);
+ tree arg0;
enum tree_code_class kind = TREE_CODE_CLASS (code);
gcc_assert (IS_EXPR_CODE_CLASS (kind)
&& TREE_CODE_LENGTH (code) == 1);
-
- arg0 = op0 = TREE_OPERAND (t, 0);
+ arg0 = op0;
if (arg0)
{
if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
{
if (TREE_CODE (arg0) == COMPOUND_EXPR)
return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
- fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
+ fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
else if (TREE_CODE (arg0) == COND_EXPR)
{
tree arg01 = TREE_OPERAND (arg0, 1);
tree arg02 = TREE_OPERAND (arg0, 2);
if (! VOID_TYPE_P (TREE_TYPE (arg01)))
- arg01 = fold (build1 (code, type, arg01));
+ arg01 = fold_build1 (code, type, arg01);
if (! VOID_TYPE_P (TREE_TYPE (arg02)))
- arg02 = fold (build1 (code, type, arg02));
- tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
- arg01, arg02));
+ arg02 = fold_build1 (code, type, arg02);
+ tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
+ arg01, arg02);
/* If this was a conversion, and all we did was to move into
inside the COND_EXPR, bring it back out. But leave it if
return arg0;
}
else if (TREE_CODE (type) != INTEGER_TYPE)
- return fold (build3 (COND_EXPR, type, arg0,
- fold (build1 (code, type,
- integer_one_node)),
- fold (build1 (code, type,
- integer_zero_node))));
+ return fold_build3 (COND_EXPR, type, arg0,
+ fold_build1 (code, type,
+ integer_one_node),
+ fold_build1 (code, type,
+ integer_zero_node));
}
}
int inside_int = INTEGRAL_TYPE_P (inside_type);
int inside_ptr = POINTER_TYPE_P (inside_type);
int inside_float = FLOAT_TYPE_P (inside_type);
+ int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
unsigned int inside_prec = TYPE_PRECISION (inside_type);
int inside_unsignedp = TYPE_UNSIGNED (inside_type);
int inter_int = INTEGRAL_TYPE_P (inter_type);
int inter_ptr = POINTER_TYPE_P (inter_type);
int inter_float = FLOAT_TYPE_P (inter_type);
+ int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
unsigned int inter_prec = TYPE_PRECISION (inter_type);
int inter_unsignedp = TYPE_UNSIGNED (inter_type);
int final_int = INTEGRAL_TYPE_P (type);
int final_ptr = POINTER_TYPE_P (type);
int final_float = FLOAT_TYPE_P (type);
+ int final_vec = TREE_CODE (type) == VECTOR_TYPE;
unsigned int final_prec = TYPE_PRECISION (type);
int final_unsignedp = TYPE_UNSIGNED (type);
if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
&& ((inter_int && final_int) || (inter_float && final_float))
&& inter_prec >= final_prec)
- return fold (build1 (code, type, TREE_OPERAND (op0, 0)));
+ return fold_build1 (code, type, TREE_OPERAND (op0, 0));
/* Likewise, if the intermediate and final types are either both
float or both integer, we don't need the middle conversion if
since then we sometimes need the inner conversion. Likewise if
the outer has a precision not equal to the size of its mode. */
if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
- || (inter_float && inside_float))
+ || (inter_float && inside_float)
+ || (inter_vec && inside_vec))
&& inter_prec >= inside_prec
- && (inter_float || inter_unsignedp == inside_unsignedp)
+ && (inter_float || inter_vec
+ || inter_unsignedp == inside_unsignedp)
&& ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
&& TYPE_MODE (type) == TYPE_MODE (inter_type))
- && ! final_ptr)
- return fold (build1 (code, type, TREE_OPERAND (op0, 0)));
+ && ! final_ptr
+ && (! final_vec || inter_prec == inside_prec))
+ return fold_build1 (code, type, TREE_OPERAND (op0, 0));
/* If we have a sign-extension of a zero-extended value, we can
replace that by a single zero-extension. */
if (inside_int && inter_int && final_int
&& inside_prec < inter_prec && inter_prec < final_prec
&& inside_unsignedp && !inter_unsignedp)
- return fold (build1 (code, type, TREE_OPERAND (op0, 0)));
+ return fold_build1 (code, type, TREE_OPERAND (op0, 0));
/* Two conversions in a row are not needed unless:
- some conversion is floating-point (overstrict for now), or
+ - some conversion is a vector (overstrict for now), or
- the intermediate type is narrower than both initial and
final, or
- the intermediate type and innermost type differ in signedness,
- the final type is a pointer type and the precisions of the
initial and intermediate types differ. */
if (! inside_float && ! inter_float && ! final_float
+ && ! inside_vec && ! inter_vec && ! final_vec
&& (inter_prec > inside_prec || inter_prec > final_prec)
&& ! (inside_int && inter_int
&& inter_unsignedp != inside_unsignedp
&& ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
&& TYPE_MODE (type) == TYPE_MODE (inter_type))
&& ! final_ptr)
- return fold (build1 (code, type, TREE_OPERAND (op0, 0)));
+ return fold_build1 (code, type, TREE_OPERAND (op0, 0));
}
if (TREE_CODE (op0) == MODIFY_EXPR
{
/* Don't leave an assignment inside a conversion
unless assigning a bitfield. */
- tem = build1 (code, type, TREE_OPERAND (op0, 1));
+ tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
/* First do the assignment, then return converted constant. */
- tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, fold (tem));
+ tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
TREE_NO_WARNING (tem) = 1;
TREE_USED (tem) = 1;
return tem;
TREE_INT_CST_HIGH (and1));
tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
TREE_CONSTANT_OVERFLOW (and1));
- return fold (build2 (BIT_AND_EXPR, type,
- fold_convert (type, and0), tem));
+ return fold_build2 (BIT_AND_EXPR, type,
+ fold_convert (type, and0), tem);
}
}
}
tem = fold_convert_const (code, type, arg0);
- return tem ? tem : t;
+ return tem ? tem : NULL_TREE;
case VIEW_CONVERT_EXPR:
if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
- return t;
+ return NULL_TREE;
case NEGATE_EXPR:
if (negate_expr_p (arg0))
return fold_convert (type, negate_expr (arg0));
/* Convert - (~A) to A + 1. */
if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
- return fold (build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
- build_int_cst (type, 1)));
- return t;
+ return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
+ build_int_cst (type, 1));
+ return NULL_TREE;
case ABS_EXPR:
if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
return fold_abs_const (arg0, type);
else if (TREE_CODE (arg0) == NEGATE_EXPR)
- return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
+ return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
/* Convert fabs((double)float) into (double)fabsf(float). */
else if (TREE_CODE (arg0) == NOP_EXPR
&& TREE_CODE (type) == REAL_TYPE)
{
tree targ0 = strip_float_extensions (arg0);
if (targ0 != arg0)
- return fold_convert (type, fold (build1 (ABS_EXPR,
- TREE_TYPE (targ0),
- targ0)));
+ return fold_convert (type, fold_build1 (ABS_EXPR,
+ TREE_TYPE (targ0),
+ targ0));
}
else if (tree_expr_nonnegative_p (arg0))
return arg0;
{
tem = fold_strip_sign_ops (arg0);
if (tem)
- return fold (build1 (ABS_EXPR, type, fold_convert (type, tem)));
+ return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
}
- return t;
+ return NULL_TREE;
case CONJ_EXPR:
if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
return build_complex (type, TREE_REALPART (arg0),
negate_expr (TREE_IMAGPART (arg0)));
else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
- return fold (build2 (TREE_CODE (arg0), type,
- fold (build1 (CONJ_EXPR, type,
- TREE_OPERAND (arg0, 0))),
- fold (build1 (CONJ_EXPR, type,
- TREE_OPERAND (arg0, 1)))));
+ return fold_build2 (TREE_CODE (arg0), type,
+ fold_build1 (CONJ_EXPR, type,
+ TREE_OPERAND (arg0, 0)),
+ fold_build1 (CONJ_EXPR, type,
+ TREE_OPERAND (arg0, 1)));
else if (TREE_CODE (arg0) == CONJ_EXPR)
return TREE_OPERAND (arg0, 0);
- return t;
+ return NULL_TREE;
case BIT_NOT_EXPR:
if (TREE_CODE (arg0) == INTEGER_CST)
return TREE_OPERAND (arg0, 0);
/* Convert ~ (-A) to A - 1. */
else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
- return fold (build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
- build_int_cst (type, 1)));
+ return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
+ build_int_cst (type, 1));
/* Convert ~ (A - 1) or ~ (A + -1) to -A. */
else if (INTEGRAL_TYPE_P (type)
&& ((TREE_CODE (arg0) == MINUS_EXPR
&& integer_onep (TREE_OPERAND (arg0, 1)))
|| (TREE_CODE (arg0) == PLUS_EXPR
&& integer_all_onesp (TREE_OPERAND (arg0, 1)))))
- return fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0)));
- return t;
+ return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
+ /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
+ else if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && (tem = fold_unary (BIT_NOT_EXPR, type,
+ fold_convert (type,
+ TREE_OPERAND (arg0, 0)))))
+ return fold_build2 (BIT_XOR_EXPR, type, tem,
+ fold_convert (type, TREE_OPERAND (arg0, 1)));
+ else if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && (tem = fold_unary (BIT_NOT_EXPR, type,
+ fold_convert (type,
+ TREE_OPERAND (arg0, 1)))))
+ return fold_build2 (BIT_XOR_EXPR, type,
+ fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
+
+ return NULL_TREE;
case TRUTH_NOT_EXPR:
/* The argument to invert_truthvalue must have Boolean type. */
tem = invert_truthvalue (arg0);
/* Avoid infinite recursion. */
if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
- return t;
+ return NULL_TREE;
return fold_convert (type, tem);
case REALPART_EXPR:
if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
- return t;
+ return NULL_TREE;
else if (TREE_CODE (arg0) == COMPLEX_EXPR)
return omit_one_operand (type, TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg0, 1));
else if (TREE_CODE (arg0) == COMPLEX_CST)
return TREE_REALPART (arg0);
else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
- return fold (build2 (TREE_CODE (arg0), type,
- fold (build1 (REALPART_EXPR, type,
- TREE_OPERAND (arg0, 0))),
- fold (build1 (REALPART_EXPR, type,
- TREE_OPERAND (arg0, 1)))));
- return t;
+ return fold_build2 (TREE_CODE (arg0), type,
+ fold_build1 (REALPART_EXPR, type,
+ TREE_OPERAND (arg0, 0)),
+ fold_build1 (REALPART_EXPR, type,
+ TREE_OPERAND (arg0, 1)));
+ return NULL_TREE;
case IMAGPART_EXPR:
if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
else if (TREE_CODE (arg0) == COMPLEX_CST)
return TREE_IMAGPART (arg0);
else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
- return fold (build2 (TREE_CODE (arg0), type,
- fold (build1 (IMAGPART_EXPR, type,
- TREE_OPERAND (arg0, 0))),
- fold (build1 (IMAGPART_EXPR, type,
- TREE_OPERAND (arg0, 1)))));
- return t;
+ return fold_build2 (TREE_CODE (arg0), type,
+ fold_build1 (IMAGPART_EXPR, type,
+ TREE_OPERAND (arg0, 0)),
+ fold_build1 (IMAGPART_EXPR, type,
+ TREE_OPERAND (arg0, 1)));
+ return NULL_TREE;
default:
- return t;
+ return NULL_TREE;
} /* switch (code) */
}
-/* Fold a binary expression EXPR. Return the folded expression if
- folding is successful. Otherwise, return the original
- expression. */
+/* Fold a binary expression of code CODE and type TYPE with operands
+ OP0 and OP1. Return the folded expression if folding is
+ successful. Otherwise, return NULL_TREE. */
-static tree
-fold_binary (tree expr)
+tree
+fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
- const tree t = expr;
- const tree type = TREE_TYPE (expr);
tree t1 = NULL_TREE;
tree tem;
- tree op0, op1;
tree arg0 = NULL_TREE, arg1 = NULL_TREE;
- enum tree_code code = TREE_CODE (t);
enum tree_code_class kind = TREE_CODE_CLASS (code);
/* WINS will be nonzero when the switch is done
gcc_assert (IS_EXPR_CODE_CLASS (kind)
&& TREE_CODE_LENGTH (code) == 2);
- arg0 = op0 = TREE_OPERAND (t, 0);
- arg1 = op1 = TREE_OPERAND (t, 1);
+ arg0 = op0;
+ arg1 = op1;
if (arg0)
{
to ARG1 to reduce the number of tests below. */
if (commutative_tree_code (code)
&& tree_swap_operands_p (arg0, arg1, true))
- return fold (build2 (code, type, op1, op0));
+ return fold_build2 (code, type, op1, op0);
/* Now WINS is set as described above,
ARG0 is the first operand of EXPR,
|| (TREE_CODE (arg0) == BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (arg0, 1)))))))
{
- tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
- : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
- : TRUTH_XOR_EXPR,
- type, fold_convert (boolean_type_node, arg0),
- fold_convert (boolean_type_node, arg1)));
+ tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
+ : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
+ : TRUTH_XOR_EXPR,
+ boolean_type_node,
+ fold_convert (boolean_type_node, arg0),
+ fold_convert (boolean_type_node, arg1));
if (code == EQ_EXPR)
tem = invert_truthvalue (tem);
- return tem;
+ return fold_convert (type, tem);
}
if (TREE_CODE_CLASS (code) == tcc_comparison
&& TREE_CODE (arg0) == COMPOUND_EXPR)
return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
- fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
+ fold_build2 (code, type, TREE_OPERAND (arg0, 1), arg1));
else if (TREE_CODE_CLASS (code) == tcc_comparison
&& TREE_CODE (arg1) == COMPOUND_EXPR)
return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
- fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
+ fold_build2 (code, type, arg0, TREE_OPERAND (arg1, 1)));
else if (TREE_CODE_CLASS (code) == tcc_binary
|| TREE_CODE_CLASS (code) == tcc_comparison)
{
if (TREE_CODE (arg0) == COMPOUND_EXPR)
return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
- fold (build2 (code, type, TREE_OPERAND (arg0, 1),
- arg1)));
+ fold_build2 (code, type, TREE_OPERAND (arg0, 1),
+ arg1));
if (TREE_CODE (arg1) == COMPOUND_EXPR
&& reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
- fold (build2 (code, type,
- arg0, TREE_OPERAND (arg1, 1))));
+ fold_build2 (code, type,
+ arg0, TREE_OPERAND (arg1, 1)));
if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
{
case PLUS_EXPR:
/* A + (-B) -> A - B */
if (TREE_CODE (arg1) == NEGATE_EXPR)
- return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
+ return fold_build2 (MINUS_EXPR, type,
+ fold_convert (type, arg0),
+ fold_convert (type, TREE_OPERAND (arg1, 0)));
/* (-A) + B -> B - A */
if (TREE_CODE (arg0) == NEGATE_EXPR
&& reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
- return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
+ return fold_build2 (MINUS_EXPR, type,
+ fold_convert (type, arg1),
+ fold_convert (type, TREE_OPERAND (arg0, 0)));
+ /* Convert ~A + 1 to -A. */
+ if (INTEGRAL_TYPE_P (type)
+ && TREE_CODE (arg0) == BIT_NOT_EXPR
+ && integer_onep (arg1))
+ return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
if (TREE_CODE (type) == COMPLEX_TYPE)
{
if (TREE_CODE (parg0) == MULT_EXPR
&& TREE_CODE (parg1) != MULT_EXPR)
- return fold (build2 (pcode, type,
- fold (build2 (PLUS_EXPR, type,
- fold_convert (type, parg0),
- fold_convert (type, marg))),
- fold_convert (type, parg1)));
+ return fold_build2 (pcode, type,
+ fold_build2 (PLUS_EXPR, type,
+ fold_convert (type, parg0),
+ fold_convert (type, marg)),
+ fold_convert (type, parg1));
if (TREE_CODE (parg0) != MULT_EXPR
&& TREE_CODE (parg1) == MULT_EXPR)
- return fold (build2 (PLUS_EXPR, type,
- fold_convert (type, parg0),
- fold (build2 (pcode, type,
- fold_convert (type, marg),
- fold_convert (type,
- parg1)))));
+ return fold_build2 (PLUS_EXPR, type,
+ fold_convert (type, parg0),
+ fold_build2 (pcode, type,
+ fold_convert (type, marg),
+ fold_convert (type,
+ parg1)));
}
if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
if (exact_log2 (int11) > 0 && int01 % int11 == 0)
{
- alt0 = fold (build2 (MULT_EXPR, type, arg00,
- build_int_cst (NULL_TREE,
- int01 / int11)));
+ alt0 = fold_build2 (MULT_EXPR, type, arg00,
+ build_int_cst (NULL_TREE,
+ int01 / int11));
alt1 = arg10;
same = arg11;
}
}
if (same)
- return fold (build2 (MULT_EXPR, type,
- fold (build2 (PLUS_EXPR, type,
- fold_convert (type, alt0),
- fold_convert (type, alt1))),
- same));
+ return fold_build2 (MULT_EXPR, type,
+ fold_build2 (PLUS_EXPR, type,
+ fold_convert (type, alt0),
+ fold_convert (type, alt1)),
+ fold_convert (type, same));
}
/* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
of the array. Loop optimizer sometimes produce this type of
expressions. */
- if (TREE_CODE (arg0) == ADDR_EXPR
- && TREE_CODE (arg1) == MULT_EXPR)
+ if (TREE_CODE (arg0) == ADDR_EXPR)
{
tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
if (tem)
return fold_convert (type, fold (tem));
}
- else if (TREE_CODE (arg1) == ADDR_EXPR
- && TREE_CODE (arg0) == MULT_EXPR)
+ else if (TREE_CODE (arg1) == ADDR_EXPR)
{
tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
if (tem)
{
tem = fold_negate_const (arg1, type);
if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
- return fold (build2 (MINUS_EXPR, type,
- fold_convert (type, arg0),
- fold_convert (type, tem)));
+ return fold_build2 (MINUS_EXPR, type,
+ fold_convert (type, arg0),
+ fold_convert (type, tem));
}
+ if (flag_unsafe_math_optimizations
+ && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
+ && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
+ && (tem = distribute_real_division (code, type, arg0, arg1)))
+ return tem;
+
/* Convert x+x into x*2.0. */
if (operand_equal_p (arg0, arg1, 0)
&& SCALAR_FLOAT_TYPE_P (type))
- return fold (build2 (MULT_EXPR, type, arg0,
- build_real (type, dconst2)));
+ return fold_build2 (MULT_EXPR, type, arg0,
+ build_real (type, dconst2));
/* Convert x*c+x into x*(c+1). */
if (flag_unsafe_math_optimizations
c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
- return fold (build2 (MULT_EXPR, type, arg1,
- build_real (type, c)));
+ return fold_build2 (MULT_EXPR, type, arg1,
+ build_real (type, c));
}
/* Convert x+x*c into x*(c+1). */
c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
- return fold (build2 (MULT_EXPR, type, arg0,
- build_real (type, c)));
+ return fold_build2 (MULT_EXPR, type, arg0,
+ build_real (type, c));
}
/* Convert x*c1+x*c2 into x*(c1+c2). */
c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
- return fold (build2 (MULT_EXPR, type,
- TREE_OPERAND (arg0, 0),
- build_real (type, c1)));
+ return fold_build2 (MULT_EXPR, type,
+ TREE_OPERAND (arg0, 0),
+ build_real (type, c1));
}
/* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
if (flag_unsafe_math_optimizations
&& TREE_CODE (tree10) == MULT_EXPR)
{
tree tree0;
- tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
- return fold (build2 (PLUS_EXPR, type, tree0, tree11));
+ tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
+ return fold_build2 (PLUS_EXPR, type, tree0, tree11);
}
}
/* Convert (b*c + d*e) + a into b*c + (d*e +a). */
&& TREE_CODE (tree00) == MULT_EXPR)
{
tree tree0;
- tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
- return fold (build2 (PLUS_EXPR, type, tree00, tree0));
+ tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
+ return fold_build2 (PLUS_EXPR, type, tree00, tree0);
}
}
}
return t1;
}
- return t;
+ return NULL_TREE;
case MINUS_EXPR:
/* A - (-B) -> A + B */
if (TREE_CODE (arg1) == NEGATE_EXPR)
- return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
+ return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
/* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
if (TREE_CODE (arg0) == NEGATE_EXPR
&& (FLOAT_TYPE_P (type)
|| (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
&& negate_expr_p (arg1)
&& reorder_operands_p (arg0, arg1))
- return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
- TREE_OPERAND (arg0, 0)));
+ return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
+ TREE_OPERAND (arg0, 0));
+ /* Convert -A - 1 to ~A. */
+ if (INTEGRAL_TYPE_P (type)
+ && TREE_CODE (arg0) == NEGATE_EXPR
+ && integer_onep (arg1))
+ return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
+
+ /* Convert -1 - A to ~A. */
+ if (INTEGRAL_TYPE_P (type)
+ && integer_all_onesp (arg0))
+ return fold_build1 (BIT_NOT_EXPR, type, arg1);
if (TREE_CODE (type) == COMPLEX_TYPE)
{
&& TREE_CODE (arg1) == BIT_AND_EXPR)
{
if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
- return fold (build2 (BIT_AND_EXPR, type,
- fold (build1 (BIT_NOT_EXPR, type,
- TREE_OPERAND (arg1, 0))),
- arg0));
+ return fold_build2 (BIT_AND_EXPR, type,
+ fold_build1 (BIT_NOT_EXPR, type,
+ TREE_OPERAND (arg1, 0)),
+ arg0);
if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
- return fold (build2 (BIT_AND_EXPR, type,
- fold (build1 (BIT_NOT_EXPR, type,
- TREE_OPERAND (arg1, 1))),
- arg0));
+ return fold_build2 (BIT_AND_EXPR, type,
+ fold_build1 (BIT_NOT_EXPR, type,
+ TREE_OPERAND (arg1, 1)),
+ arg0);
}
/* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
{
tree mask0 = TREE_OPERAND (arg0, 1);
tree mask1 = TREE_OPERAND (arg1, 1);
- tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
+ tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
if (operand_equal_p (tem, mask1, 0))
{
- tem = fold (build2 (BIT_XOR_EXPR, type,
- TREE_OPERAND (arg0, 0), mask1));
- return fold (build2 (MINUS_EXPR, type, tem, mask1));
+ tem = fold_build2 (BIT_XOR_EXPR, type,
+ TREE_OPERAND (arg0, 0), mask1);
+ return fold_build2 (MINUS_EXPR, type, tem, mask1);
}
}
}
&& (TREE_CODE (arg1) != REAL_CST
|| REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
|| (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
- return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
+ return fold_build2 (PLUS_EXPR, type,
+ fold_convert (type, arg0),
+ fold_convert (type, negate_expr (arg1)));
/* Try folding difference of addresses. */
{
&& ptr_difference_const (arg0, arg1, &diff))
return build_int_cst_type (type, diff);
}
-
+
+ /* Fold &a[i] - &a[j] to i-j. */
+ if (TREE_CODE (arg0) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
+ && TREE_CODE (arg1) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
+ {
+ tree aref0 = TREE_OPERAND (arg0, 0);
+ tree aref1 = TREE_OPERAND (arg1, 0);
+ if (operand_equal_p (TREE_OPERAND (aref0, 0),
+ TREE_OPERAND (aref1, 0), 0))
+ {
+ tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
+ tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
+ tree esz = array_ref_element_size (aref0);
+ tree diff = build2 (MINUS_EXPR, type, op0, op1);
+ return fold_build2 (MULT_EXPR, type, diff,
+ fold_convert (type, esz));
+
+ }
+ }
+
/* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
of the array. Loop optimizer sometimes produce this type of
expressions. */
- if (TREE_CODE (arg0) == ADDR_EXPR
- && TREE_CODE (arg1) == MULT_EXPR)
+ if (TREE_CODE (arg0) == ADDR_EXPR)
{
tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
if (tem)
return fold_convert (type, fold (tem));
}
+ if (flag_unsafe_math_optimizations
+ && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
+ && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
+ && (tem = distribute_real_division (code, type, arg0, arg1)))
+ return tem;
+
if (TREE_CODE (arg0) == MULT_EXPR
&& TREE_CODE (arg1) == MULT_EXPR
&& (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
/* (A * C) - (B * C) -> (A-B) * C. */
if (operand_equal_p (TREE_OPERAND (arg0, 1),
TREE_OPERAND (arg1, 1), 0))
- return fold (build2 (MULT_EXPR, type,
- fold (build2 (MINUS_EXPR, type,
- TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0))),
- TREE_OPERAND (arg0, 1)));
+ return fold_build2 (MULT_EXPR, type,
+ fold_build2 (MINUS_EXPR, type,
+ TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg1, 0)),
+ TREE_OPERAND (arg0, 1));
/* (A * C1) - (A * C2) -> A * (C1-C2). */
if (operand_equal_p (TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg1, 0), 0))
- return fold (build2 (MULT_EXPR, type,
- TREE_OPERAND (arg0, 0),
- fold (build2 (MINUS_EXPR, type,
- TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1)))));
+ return fold_build2 (MULT_EXPR, type,
+ TREE_OPERAND (arg0, 0),
+ fold_build2 (MINUS_EXPR, type,
+ TREE_OPERAND (arg0, 1),
+ TREE_OPERAND (arg1, 1)));
}
goto associate;
case MULT_EXPR:
/* (-A) * (-B) -> A * B */
if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
- return fold (build2 (MULT_EXPR, type,
- TREE_OPERAND (arg0, 0),
- negate_expr (arg1)));
+ return fold_build2 (MULT_EXPR, type,
+ TREE_OPERAND (arg0, 0),
+ negate_expr (arg1));
if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
- return fold (build2 (MULT_EXPR, type,
- negate_expr (arg0),
- TREE_OPERAND (arg1, 0)));
+ return fold_build2 (MULT_EXPR, type,
+ negate_expr (arg0),
+ TREE_OPERAND (arg1, 0));
if (TREE_CODE (type) == COMPLEX_TYPE)
{
return omit_one_operand (type, arg1, arg0);
if (integer_onep (arg1))
return non_lvalue (fold_convert (type, arg0));
+ /* Transform x * -1 into -x. */
+ if (integer_all_onesp (arg1))
+ return fold_convert (type, negate_expr (arg0));
/* (a * (1 << b)) is (a << b) */
if (TREE_CODE (arg1) == LSHIFT_EXPR
&& integer_onep (TREE_OPERAND (arg1, 0)))
- return fold (build2 (LSHIFT_EXPR, type, arg0,
- TREE_OPERAND (arg1, 1)));
+ return fold_build2 (LSHIFT_EXPR, type, arg0,
+ TREE_OPERAND (arg1, 1));
if (TREE_CODE (arg0) == LSHIFT_EXPR
&& integer_onep (TREE_OPERAND (arg0, 0)))
- return fold (build2 (LSHIFT_EXPR, type, arg1,
- TREE_OPERAND (arg0, 1)));
+ return fold_build2 (LSHIFT_EXPR, type, arg1,
+ TREE_OPERAND (arg0, 1));
if (TREE_CODE (arg1) == INTEGER_CST
&& 0 != (tem = extract_muldiv (op0,
tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
arg1, 0);
if (tem)
- return fold (build2 (RDIV_EXPR, type, tem,
- TREE_OPERAND (arg0, 1)));
+ return fold_build2 (RDIV_EXPR, type, tem,
+ TREE_OPERAND (arg0, 1));
}
/* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
if (tem != NULL_TREE)
{
tem = fold_convert (type, tem);
- return fold (build2 (MULT_EXPR, type, tem, tem));
+ return fold_build2 (MULT_EXPR, type, tem, tem);
}
}
/* Optimize root(x)*root(y) as root(x*y). */
rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
- arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
+ arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
arglist = build_tree_list (NULL_TREE, arg);
return build_function_call_expr (rootfn, arglist);
}
if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
{
tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
- tree arg = build2 (PLUS_EXPR, type,
- TREE_VALUE (TREE_OPERAND (arg0, 1)),
- TREE_VALUE (TREE_OPERAND (arg1, 1)));
- tree arglist = build_tree_list (NULL_TREE, fold (arg));
+ tree arg = fold_build2 (PLUS_EXPR, type,
+ TREE_VALUE (TREE_OPERAND (arg0, 1)),
+ TREE_VALUE (TREE_OPERAND (arg1, 1)));
+ tree arglist = build_tree_list (NULL_TREE, arg);
return build_function_call_expr (expfn, arglist);
}
if (operand_equal_p (arg01, arg11, 0))
{
tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
- tree arg = build2 (MULT_EXPR, type, arg00, arg10);
- tree arglist = tree_cons (NULL_TREE, fold (arg),
+ tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
+ tree arglist = tree_cons (NULL_TREE, arg,
build_tree_list (NULL_TREE,
arg01));
return build_function_call_expr (powfn, arglist);
if (operand_equal_p (arg00, arg10, 0))
{
tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
- tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
+ tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
tree arglist = tree_cons (NULL_TREE, arg00,
build_tree_list (NULL_TREE,
arg));
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& TREE_CODE (arg1) == BIT_NOT_EXPR)
{
- return fold (build1 (BIT_NOT_EXPR, type,
- build2 (BIT_AND_EXPR, type,
- TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0))));
+ return fold_build1 (BIT_NOT_EXPR, type,
+ build2 (BIT_AND_EXPR, type,
+ TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg1, 0)));
}
/* See if this can be simplified into a rotate first. If that
if (integer_zerop (arg1))
return non_lvalue (fold_convert (type, arg0));
if (integer_all_onesp (arg1))
- return fold (build1 (BIT_NOT_EXPR, type, arg0));
+ return fold_build1 (BIT_NOT_EXPR, type, arg0);
if (operand_equal_p (arg0, arg1, 0))
return omit_one_operand (type, integer_zero_node, arg0);
goto bit_ior;
}
+ /* Convert ~X ^ ~Y to X ^ Y. */
+ if (TREE_CODE (arg0) == BIT_NOT_EXPR
+ && TREE_CODE (arg1) == BIT_NOT_EXPR)
+ return fold_build2 (code, type,
+ fold_convert (type, TREE_OPERAND (arg0, 0)),
+ fold_convert (type, TREE_OPERAND (arg1, 0)));
+
/* See if this can be simplified into a rotate first. If that
is unsuccessful continue in the association code. */
goto bit_rotate;
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& TREE_CODE (arg1) == BIT_NOT_EXPR)
{
- return fold (build1 (BIT_NOT_EXPR, type,
- build2 (BIT_IOR_EXPR, type,
- TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0))));
+ return fold_build1 (BIT_NOT_EXPR, type,
+ build2 (BIT_IOR_EXPR, type,
+ TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg1, 0)));
}
goto associate;
if (TREE_CODE (arg1) == REAL_CST
&& !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
&& real_zerop (arg1))
- return t;
+ return NULL_TREE;
/* (-A) / (-B) -> A / B */
if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
- return fold (build2 (RDIV_EXPR, type,
- TREE_OPERAND (arg0, 0),
- negate_expr (arg1)));
+ return fold_build2 (RDIV_EXPR, type,
+ TREE_OPERAND (arg0, 0),
+ negate_expr (arg1));
if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
- return fold (build2 (RDIV_EXPR, type,
- negate_expr (arg0),
- TREE_OPERAND (arg1, 0)));
+ return fold_build2 (RDIV_EXPR, type,
+ negate_expr (arg0),
+ TREE_OPERAND (arg1, 0));
/* In IEEE floating point, x/1 is not equivalent to x for snans. */
if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
if (flag_unsafe_math_optimizations
&& 0 != (tem = const_binop (code, build_real (type, dconst1),
arg1, 0)))
- return fold (build2 (MULT_EXPR, type, arg0, tem));
+ return fold_build2 (MULT_EXPR, type, arg0, tem);
/* Find the reciprocal if optimizing and the result is exact. */
if (optimize)
{
if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
{
tem = build_real (type, r);
- return fold (build2 (MULT_EXPR, type, arg0, tem));
+ return fold_build2 (MULT_EXPR, type,
+ fold_convert (type, arg0), tem);
}
}
}
/* Convert A/B/C to A/(B*C). */
if (flag_unsafe_math_optimizations
&& TREE_CODE (arg0) == RDIV_EXPR)
- return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
- fold (build2 (MULT_EXPR, type,
- TREE_OPERAND (arg0, 1), arg1))));
+ return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
+ fold_build2 (MULT_EXPR, type,
+ TREE_OPERAND (arg0, 1), arg1));
/* Convert A/(B/C) to (A/B)*C. */
if (flag_unsafe_math_optimizations
&& TREE_CODE (arg1) == RDIV_EXPR)
- return fold (build2 (MULT_EXPR, type,
- fold (build2 (RDIV_EXPR, type, arg0,
- TREE_OPERAND (arg1, 0))),
- TREE_OPERAND (arg1, 1)));
+ return fold_build2 (MULT_EXPR, type,
+ fold_build2 (RDIV_EXPR, type, arg0,
+ TREE_OPERAND (arg1, 0)),
+ TREE_OPERAND (arg1, 1));
/* Convert C1/(X*C2) into (C1/C2)/X. */
if (flag_unsafe_math_optimizations
tree tem = const_binop (RDIV_EXPR, arg0,
TREE_OPERAND (arg1, 1), 0);
if (tem)
- return fold (build2 (RDIV_EXPR, type, tem,
- TREE_OPERAND (arg1, 0)));
+ return fold_build2 (RDIV_EXPR, type, tem,
+ TREE_OPERAND (arg1, 0));
}
if (TREE_CODE (type) == COMPLEX_TYPE)
tree arglist = build_tree_list (NULL_TREE,
fold_convert (type, arg));
arg1 = build_function_call_expr (expfn, arglist);
- return fold (build2 (MULT_EXPR, type, arg0, arg1));
+ return fold_build2 (MULT_EXPR, type, arg0, arg1);
}
/* Optimize x/pow(y,z) into x*pow(y,-z). */
tree arglist = tree_cons(NULL_TREE, arg10,
build_tree_list (NULL_TREE, neg11));
arg1 = build_function_call_expr (powfn, arglist);
- return fold (build2 (MULT_EXPR, type, arg0, arg1));
+ return fold_build2 (MULT_EXPR, type, arg0, arg1);
}
}
{
tree tmp = TREE_OPERAND (arg0, 1);
tmp = build_function_call_expr (tanfn, tmp);
- return fold (build2 (RDIV_EXPR, type,
- build_real (type, dconst1), tmp));
+ return fold_build2 (RDIV_EXPR, type,
+ build_real (type, dconst1), tmp);
}
}
if (integer_onep (arg1))
return non_lvalue (fold_convert (type, arg0));
if (integer_zerop (arg1))
- return t;
+ return NULL_TREE;
/* X / -1 is -X. */
if (!TYPE_UNSIGNED (type)
&& TREE_CODE (arg1) == INTEGER_CST
after the last round to changes to the DIV code in expmed.c. */
if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
&& multiple_of_p (type, arg0, arg1))
- return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
+ return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
if (TREE_CODE (arg1) == INTEGER_CST
&& 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
/* X % 0, return X % 0 unchanged so that we can get the
proper warnings and errors. */
if (integer_zerop (arg1))
- return t;
+ return NULL_TREE;
/* 0 % X is always zero, but be sure to preserve any side
effects in X. Place this after checking for X == 0. */
}
mask = build_int_cst_wide (type, low, high);
- return fold (build2 (BIT_AND_EXPR, type,
- fold_convert (type, arg0), mask));
+ return fold_build2 (BIT_AND_EXPR, type,
+ fold_convert (type, arg0), mask);
}
/* X % -C is the same as X % C. */
if (code == TRUNC_MOD_EXPR
&& !TYPE_UNSIGNED (type)
&& TREE_CODE (arg1) == INTEGER_CST
+ && !TREE_CONSTANT_OVERFLOW (arg1)
&& TREE_INT_CST_HIGH (arg1) < 0
&& !flag_trapv
/* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
&& !sign_bit_p (arg1, arg1))
- return fold (build2 (code, type, fold_convert (type, arg0),
- fold_convert (type, negate_expr (arg1))));
+ return fold_build2 (code, type, fold_convert (type, arg0),
+ fold_convert (type, negate_expr (arg1)));
/* X % -Y is the same as X % Y. */
if (code == TRUNC_MOD_EXPR
&& !TYPE_UNSIGNED (type)
&& TREE_CODE (arg1) == NEGATE_EXPR
&& !flag_trapv)
- return fold (build2 (code, type, fold_convert (type, arg0),
- fold_convert (type, TREE_OPERAND (arg1, 0))));
+ return fold_build2 (code, type, fold_convert (type, arg0),
+ fold_convert (type, TREE_OPERAND (arg1, 0)));
if (TREE_CODE (arg1) == INTEGER_CST
&& 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
/* Since negative shift count is not well-defined,
don't try to compute it in the compiler. */
if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
- return t;
+ return NULL_TREE;
/* Rewrite an LROTATE_EXPR by a constant into an
RROTATE_EXPR by a new constant. */
if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
GET_MODE_BITSIZE (TYPE_MODE (type)));
tem = fold_convert (TREE_TYPE (arg1), tem);
tem = const_binop (MINUS_EXPR, tem, arg1, 0);
- return fold (build2 (RROTATE_EXPR, type, arg0, tem));
+ return fold_build2 (RROTATE_EXPR, type, arg0, tem);
}
/* If we have a rotate of a bit operation with the rotate count and
|| TREE_CODE (arg0) == BIT_IOR_EXPR
|| TREE_CODE (arg0) == BIT_XOR_EXPR)
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
- return fold (build2 (TREE_CODE (arg0), type,
- fold (build2 (code, type,
- TREE_OPERAND (arg0, 0), arg1)),
- fold (build2 (code, type,
- TREE_OPERAND (arg0, 1), arg1))));
+ return fold_build2 (TREE_CODE (arg0), type,
+ fold_build2 (code, type,
+ TREE_OPERAND (arg0, 0), arg1),
+ fold_build2 (code, type,
+ TREE_OPERAND (arg0, 1), arg1));
/* Two consecutive rotates adding up to the width of the mode can
be ignored. */
{
tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
if (tem)
- return fold (build2 (code, type, tem, arg1));
+ return fold_build2 (code, type, tem, arg1);
tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
if (tem)
- return fold (build2 (code, type, arg0, tem));
+ return fold_build2 (code, type, arg0, tem);
}
truth_andor:
/* We only do these simplifications if we are optimizing. */
if (!optimize)
- return t;
+ return NULL_TREE;
/* Check for things like (A || B) && (A || C). We can convert this
to A || (B && C). Note that either operator can be any of the four
|| code == TRUTH_OR_EXPR));
if (operand_equal_p (a00, a10, 0))
- return fold (build2 (TREE_CODE (arg0), type, a00,
- fold (build2 (code, type, a01, a11))));
+ return fold_build2 (TREE_CODE (arg0), type, a00,
+ fold_build2 (code, type, a01, a11));
else if (commutative && operand_equal_p (a00, a11, 0))
- return fold (build2 (TREE_CODE (arg0), type, a00,
- fold (build2 (code, type, a01, a10))));
+ return fold_build2 (TREE_CODE (arg0), type, a00,
+ fold_build2 (code, type, a01, a10));
else if (commutative && operand_equal_p (a01, a10, 0))
- return fold (build2 (TREE_CODE (arg0), type, a01,
- fold (build2 (code, type, a00, a11))));
+ return fold_build2 (TREE_CODE (arg0), type, a01,
+ fold_build2 (code, type, a00, a11));
/* This case if tricky because we must either have commutative
operators or else A10 must not have side-effects. */
else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
&& operand_equal_p (a01, a11, 0))
- return fold (build2 (TREE_CODE (arg0), type,
- fold (build2 (code, type, a00, a10)),
- a01));
+ return fold_build2 (TREE_CODE (arg0), type,
+ fold_build2 (code, type, a00, a10),
+ a01);
}
/* See if we can build a range comparison. */
if (TREE_CODE (arg0) == code
&& 0 != (tem = fold_truthop (code, type,
TREE_OPERAND (arg0, 1), arg1)))
- return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
return tem;
- return t;
+ return NULL_TREE;
case TRUTH_ORIF_EXPR:
/* Note that the operands of this must be ints
return non_lvalue (fold_convert (type, arg0));
/* If the second arg is constant true, this is a logical inversion. */
if (integer_onep (arg1))
- return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
+ {
+ /* Only call invert_truthvalue if operand is a truth value. */
+ if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
+ tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
+ else
+ tem = invert_truthvalue (arg0);
+ return non_lvalue (fold_convert (type, tem));
+ }
/* Identical arguments cancel to zero. */
if (operand_equal_p (arg0, arg1, 0))
return omit_one_operand (type, integer_zero_node, arg0);
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
return omit_one_operand (type, integer_one_node, arg0);
- return t;
+ return NULL_TREE;
case EQ_EXPR:
case NE_EXPR:
case LT_EXPR:
case GT_EXPR:
case LE_EXPR:
- case GE_EXPR:
+ case GE_EXPR:
/* If one arg is a real or integer constant, put it last. */
if (tree_swap_operands_p (arg0, arg1, true))
- return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
+ return fold_build2 (swap_tree_comparison (code), type, op1, op0);
+
+ /* bool_var != 0 becomes bool_var. */
+ if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
+ && code == NE_EXPR)
+ return non_lvalue (fold_convert (type, arg0));
+
+ /* bool_var == 1 becomes bool_var. */
+ if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
+ && code == EQ_EXPR)
+ return non_lvalue (fold_convert (type, arg0));
/* If this is an equality comparison of the address of a non-weak
object against zero, then we know the result. */
&& extract_array_ref (arg1, &base1, &offset1)
&& operand_equal_p (base0, base1, 0))
{
+ if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))
+ && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))))
+ offset0 = NULL_TREE;
+ if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))
+ && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))))
+ offset1 = NULL_TREE;
if (offset0 == NULL_TREE
&& offset1 == NULL_TREE)
{
offset1 = build_int_cst (TREE_TYPE (offset0), 0);
if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
- return fold (build2 (code, type, offset0, offset1));
+ return fold_build2 (code, type, offset0, offset1);
+ }
+ }
+
+ /* Transform comparisons of the form X +- C CMP X. */
+ if ((code != EQ_EXPR && code != NE_EXPR)
+ && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
+ && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
+ && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
+ || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+ && !TYPE_UNSIGNED (TREE_TYPE (arg1))
+ && !(flag_wrapv || flag_trapv))))
+ {
+ tree arg01 = TREE_OPERAND (arg0, 1);
+ enum tree_code code0 = TREE_CODE (arg0);
+ int is_positive;
+
+ if (TREE_CODE (arg01) == REAL_CST)
+ is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
+ else
+ is_positive = tree_int_cst_sgn (arg01);
+
+ /* (X - c) > X becomes false. */
+ if (code == GT_EXPR
+ && ((code0 == MINUS_EXPR && is_positive >= 0)
+ || (code0 == PLUS_EXPR && is_positive <= 0)))
+ return constant_boolean_node (0, type);
+
+ /* Likewise (X + c) < X becomes false. */
+ if (code == LT_EXPR
+ && ((code0 == PLUS_EXPR && is_positive >= 0)
+ || (code0 == MINUS_EXPR && is_positive <= 0)))
+ return constant_boolean_node (0, type);
+
+ /* Convert (X - c) <= X to true. */
+ if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
+ && code == LE_EXPR
+ && ((code0 == MINUS_EXPR && is_positive >= 0)
+ || (code0 == PLUS_EXPR && is_positive <= 0)))
+ return constant_boolean_node (1, type);
+
+ /* Convert (X + c) >= X to true. */
+ if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
+ && code == GE_EXPR
+ && ((code0 == PLUS_EXPR && is_positive >= 0)
+ || (code0 == MINUS_EXPR && is_positive <= 0)))
+ return constant_boolean_node (1, type);
+
+ if (TREE_CODE (arg01) == INTEGER_CST)
+ {
+ /* Convert X + c > X and X - c < X to true for integers. */
+ if (code == GT_EXPR
+ && ((code0 == PLUS_EXPR && is_positive > 0)
+ || (code0 == MINUS_EXPR && is_positive < 0)))
+ return constant_boolean_node (1, type);
+
+ if (code == LT_EXPR
+ && ((code0 == MINUS_EXPR && is_positive > 0)
+ || (code0 == PLUS_EXPR && is_positive < 0)))
+ return constant_boolean_node (1, type);
+
+ /* Convert X + c <= X and X - c >= X to false for integers. */
+ if (code == LE_EXPR
+ && ((code0 == PLUS_EXPR && is_positive > 0)
+ || (code0 == MINUS_EXPR && is_positive < 0)))
+ return constant_boolean_node (0, type);
+
+ if (code == GE_EXPR
+ && ((code0 == MINUS_EXPR && is_positive > 0)
+ || (code0 == PLUS_EXPR && is_positive < 0)))
+ return constant_boolean_node (0, type);
}
}
/* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
- return fold (build2 (code, type, fold_convert (newtype, targ0),
- fold_convert (newtype, targ1)));
+ return fold_build2 (code, type, fold_convert (newtype, targ0),
+ fold_convert (newtype, targ1));
/* (-a) CMP (-b) -> b CMP a */
if (TREE_CODE (arg0) == NEGATE_EXPR
&& TREE_CODE (arg1) == NEGATE_EXPR)
- return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
- TREE_OPERAND (arg0, 0)));
+ return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
+ TREE_OPERAND (arg0, 0));
if (TREE_CODE (arg1) == REAL_CST)
{
/* (-a) CMP CST -> a swap(CMP) (-CST) */
if (TREE_CODE (arg0) == NEGATE_EXPR)
return
- fold (build2 (swap_tree_comparison (code), type,
- TREE_OPERAND (arg0, 0),
- build_real (TREE_TYPE (arg1),
- REAL_VALUE_NEGATE (cst))));
+ fold_build2 (swap_tree_comparison (code), type,
+ TREE_OPERAND (arg0, 0),
+ build_real (TREE_TYPE (arg1),
+ REAL_VALUE_NEGATE (cst)));
/* IEEE doesn't distinguish +0 and -0 in comparisons. */
/* a CMP (-0) -> a CMP 0 */
if (REAL_VALUE_MINUS_ZERO (cst))
- return fold (build2 (code, type, arg0,
- build_real (TREE_TYPE (arg1), dconst0)));
+ return fold_build2 (code, type, arg0,
+ build_real (TREE_TYPE (arg1), dconst0));
/* x != NaN is always true, other ops are always false. */
if (REAL_VALUE_ISNAN (cst)
? MINUS_EXPR : PLUS_EXPR,
arg1, TREE_OPERAND (arg0, 1), 0))
&& ! TREE_CONSTANT_OVERFLOW (tem))
- return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
/* Likewise, we can simplify a comparison of a real constant with
a MINUS_EXPR whose first operand is also a real constant, i.e.
&& 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
arg1, 0))
&& ! TREE_CONSTANT_OVERFLOW (tem))
- return fold (build2 (swap_tree_comparison (code), type,
- TREE_OPERAND (arg0, 1), tem));
+ return fold_build2 (swap_tree_comparison (code), type,
+ TREE_OPERAND (arg0, 1), tem);
/* Fold comparisons against built-in math functions. */
if (TREE_CODE (arg1) == REAL_CST
if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
{
- newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
- arg1, TREE_OPERAND (arg0, 1)));
+ newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
+ arg1, TREE_OPERAND (arg0, 1));
varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg0, 1));
}
else
{
- newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
- arg1, TREE_OPERAND (arg0, 1)));
+ newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
+ arg1, TREE_OPERAND (arg0, 1));
varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg0, 1));
/* First check whether the comparison would come out
always the same. If we don't do that we would
change the meaning with the masking. */
- folded_compare = fold (build2 (code, type,
- TREE_OPERAND (varop, 0), arg1));
+ folded_compare = fold_build2 (code, type,
+ TREE_OPERAND (varop, 0), arg1);
if (integer_zerop (folded_compare)
|| integer_onep (folded_compare))
return omit_one_operand (type, folded_compare, varop);
shift = build_int_cst (NULL_TREE,
TYPE_PRECISION (TREE_TYPE (varop)) - size);
shift = fold_convert (TREE_TYPE (varop), shift);
- newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
- newconst, shift));
- newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
- newconst, shift));
+ newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
+ newconst, shift);
+ newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
+ newconst, shift);
}
- return fold (build2 (code, type, varop, newconst));
+ return fold_build2 (code, type, varop, newconst);
}
/* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
switch (code)
{
case GE_EXPR:
- arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
- return fold (build2 (GT_EXPR, type, arg0, arg1));
+ arg1 = const_binop (MINUS_EXPR, arg1,
+ build_int_cst (TREE_TYPE (arg1), 1), 0);
+ return fold_build2 (GT_EXPR, type, arg0,
+ fold_convert (TREE_TYPE (arg0), arg1));
case LT_EXPR:
- arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
- return fold (build2 (LE_EXPR, type, arg0, arg1));
+ arg1 = const_binop (MINUS_EXPR, arg1,
+ build_int_cst (TREE_TYPE (arg1), 1), 0);
+ return fold_build2 (LE_EXPR, type, arg0,
+ fold_convert (TREE_TYPE (arg0), arg1));
default:
break;
}
/* Comparisons with the highest or lowest possible integer of
- the specified size will have known values.
-
- This is quite similar to fold_relational_hi_lo, however,
- attempts to share the code have been nothing but trouble. */
+ the specified size will have known values. */
{
int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
return omit_one_operand (type, integer_zero_node, arg0);
case GE_EXPR:
- return fold (build2 (EQ_EXPR, type, arg0, arg1));
+ return fold_build2 (EQ_EXPR, type, arg0, arg1);
case LE_EXPR:
return omit_one_operand (type, integer_one_node, arg0);
case LT_EXPR:
- return fold (build2 (NE_EXPR, type, arg0, arg1));
+ return fold_build2 (NE_EXPR, type, arg0, arg1);
/* The GE_EXPR and LT_EXPR cases above are not normally
reached because of previous transformations. */
{
case GT_EXPR:
arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
- return fold (build2 (EQ_EXPR, type, arg0, arg1));
+ return fold_build2 (EQ_EXPR, type, arg0, arg1);
case LE_EXPR:
arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
- return fold (build2 (NE_EXPR, type, arg0, arg1));
+ return fold_build2 (NE_EXPR, type, arg0, arg1);
default:
break;
}
return omit_one_operand (type, integer_zero_node, arg0);
case LE_EXPR:
- return fold (build2 (EQ_EXPR, type, arg0, arg1));
+ return fold_build2 (EQ_EXPR, type, arg0, arg1);
case GE_EXPR:
return omit_one_operand (type, integer_one_node, arg0);
case GT_EXPR:
- return fold (build2 (NE_EXPR, type, arg0, arg1));
+ return fold_build2 (NE_EXPR, type, arg0, arg1);
default:
break;
{
case GE_EXPR:
arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
- return fold (build2 (NE_EXPR, type, arg0, arg1));
+ return fold_build2 (NE_EXPR, type, arg0, arg1);
case LT_EXPR:
arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
- return fold (build2 (EQ_EXPR, type, arg0, arg1));
+ return fold_build2 (EQ_EXPR, type, arg0, arg1);
default:
break;
}
? MINUS_EXPR : PLUS_EXPR,
arg1, TREE_OPERAND (arg0, 1), 0))
&& ! TREE_CONSTANT_OVERFLOW (tem))
- return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
/* Similarly for a NEGATE_EXPR. */
else if ((code == EQ_EXPR || code == NE_EXPR)
&& 0 != (tem = negate_expr (arg1))
&& TREE_CODE (tem) == INTEGER_CST
&& ! TREE_CONSTANT_OVERFLOW (tem))
- return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
/* If we have X - Y == 0, we can convert that to X == Y and similarly
for !=. Don't do this for ordered comparisons due to overflow. */
else if ((code == NE_EXPR || code == EQ_EXPR)
&& integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
- return fold (build2 (code, type,
- TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
+ return fold_build2 (code, type,
+ TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
- && TREE_CODE (arg0) == NOP_EXPR)
+ && (TREE_CODE (arg0) == NOP_EXPR
+ || TREE_CODE (arg0) == CONVERT_EXPR))
{
/* If we are widening one operand of an integer comparison,
see if the other operand is similarly being widened. Perhaps we
if (tem)
return tem;
- return t;
+ return NULL_TREE;
}
/* If we are comparing an ABS_EXPR with a constant, we can
&& (0 != (tem = negate_expr (arg1)))
&& TREE_CODE (tem) == INTEGER_CST
&& ! TREE_CONSTANT_OVERFLOW (tem))
- return fold (build2 (TRUTH_ANDIF_EXPR, type,
- build2 (GE_EXPR, type,
- TREE_OPERAND (arg0, 0), tem),
- build2 (LE_EXPR, type,
- TREE_OPERAND (arg0, 0), arg1)));
+ return fold_build2 (TRUTH_ANDIF_EXPR, type,
+ build2 (GE_EXPR, type,
+ TREE_OPERAND (arg0, 0), tem),
+ build2 (LE_EXPR, type,
+ TREE_OPERAND (arg0, 0), arg1));
/* Convert ABS_EXPR<x> >= 0 to true. */
else if (code == GE_EXPR
else if ((code == EQ_EXPR || code == NE_EXPR)
&& TREE_CODE (arg0) == ABS_EXPR
&& (integer_zerop (arg1) || real_zerop (arg1)))
- return fold (build2 (code, type, TREE_OPERAND (arg0, 0), arg1));
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
/* If this is an EQ or NE comparison with zero and ARG0 is
(1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
if (TREE_CODE (arg00) == LSHIFT_EXPR
&& integer_onep (TREE_OPERAND (arg00, 0)))
return
- fold (build2 (code, type,
- build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
- build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
- arg01, TREE_OPERAND (arg00, 1)),
- fold_convert (TREE_TYPE (arg0),
- integer_one_node)),
- arg1));
+ fold_build2 (code, type,
+ build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
+ build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
+ arg01, TREE_OPERAND (arg00, 1)),
+ fold_convert (TREE_TYPE (arg0),
+ integer_one_node)),
+ arg1);
else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
&& integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
return
- fold (build2 (code, type,
- build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
- build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
- arg00, TREE_OPERAND (arg01, 1)),
- fold_convert (TREE_TYPE (arg0),
- integer_one_node)),
- arg1));
+ fold_build2 (code, type,
+ build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
+ build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
+ arg00, TREE_OPERAND (arg01, 1)),
+ fold_convert (TREE_TYPE (arg0),
+ integer_one_node)),
+ arg1);
}
/* If this is an NE or EQ comparison of zero against the result of a
&& integer_pow2p (TREE_OPERAND (arg0, 1)))
{
tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
- tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
- fold_convert (newtype,
- TREE_OPERAND (arg0, 0)),
- fold_convert (newtype,
- TREE_OPERAND (arg0, 1))));
+ tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
+ fold_convert (newtype,
+ TREE_OPERAND (arg0, 0)),
+ fold_convert (newtype,
+ TREE_OPERAND (arg0, 1)));
- return fold (build2 (code, type, newmod,
- fold_convert (newtype, arg1)));
+ return fold_build2 (code, type, newmod,
+ fold_convert (newtype, arg1));
}
/* If this is an NE comparison of zero with an AND of one, remove the
&& TREE_CODE (arg0) == BIT_AND_EXPR
&& integer_pow2p (TREE_OPERAND (arg0, 1))
&& operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
- return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
- arg0, fold_convert (TREE_TYPE (arg0),
- integer_zero_node)));
+ return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
+ arg0, fold_convert (TREE_TYPE (arg0),
+ integer_zero_node));
- /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
- 2, then fold the expression into shifts and logical operations. */
- tem = fold_single_bit_test (code, arg0, arg1, type);
+ /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
+ bit, then fold the expression into A < 0 or A >= 0. */
+ tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
if (tem)
return tem;
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
- tree notc = fold (build1 (BIT_NOT_EXPR,
- TREE_TYPE (TREE_OPERAND (arg0, 1)),
- TREE_OPERAND (arg0, 1)));
- tree dandnotc = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
- arg1, notc));
+ tree notc = fold_build1 (BIT_NOT_EXPR,
+ TREE_TYPE (TREE_OPERAND (arg0, 1)),
+ TREE_OPERAND (arg0, 1));
+ tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
+ arg1, notc);
tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
if (integer_nonzerop (dandnotc))
return omit_one_operand (type, rslt, arg0);
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
- tree notd = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
- tree candnotd = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
- TREE_OPERAND (arg0, 1), notd));
+ tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
+ tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
+ TREE_OPERAND (arg0, 1), notd);
tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
if (integer_nonzerop (candnotd))
return omit_one_operand (type, rslt, arg0);
if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
|| ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
return constant_boolean_node (1, type);
- return fold (build2 (EQ_EXPR, type, arg0, arg1));
+ return fold_build2 (EQ_EXPR, type, arg0, arg1);
case NE_EXPR:
/* For NE, we can only do this simplification if integer
was the same as ARG1. */
tree high_result
- = fold (build2 (code, type,
- eval_subst (arg0, cval1, maxval,
- cval2, minval),
- arg1));
+ = fold_build2 (code, type,
+ eval_subst (arg0, cval1, maxval,
+ cval2, minval),
+ arg1);
tree equal_result
- = fold (build2 (code, type,
- eval_subst (arg0, cval1, maxval,
- cval2, maxval),
- arg1));
+ = fold_build2 (code, type,
+ eval_subst (arg0, cval1, maxval,
+ cval2, maxval),
+ arg1);
tree low_result
- = fold (build2 (code, type,
- eval_subst (arg0, cval1, minval,
- cval2, maxval),
- arg1));
+ = fold_build2 (code, type,
+ eval_subst (arg0, cval1, minval,
+ cval2, maxval),
+ arg1);
/* All three of these results should be 0 or 1. Confirm they
are. Then use those values to select the proper code
return omit_one_operand (type, integer_one_node, arg0);
}
- tem = build2 (code, type, cval1, cval2);
if (save_p)
- return save_expr (tem);
+ return save_expr (build2 (code, type, cval1, cval2));
else
- return fold (tem);
+ return fold_build2 (code, type, cval1, cval2);
}
}
}
return t1;
}
+ /* Fold a comparison of the address of COMPONENT_REFs with the same
+ type and component to a comparison of the address of the base
+ object. In short, &x->a OP &y->a to x OP y and
+ &x->a OP &y.a to x OP &y */
+ if (TREE_CODE (arg0) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
+ && TREE_CODE (arg1) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
+ {
+ tree cref0 = TREE_OPERAND (arg0, 0);
+ tree cref1 = TREE_OPERAND (arg1, 0);
+ if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
+ {
+ tree op0 = TREE_OPERAND (cref0, 0);
+ tree op1 = TREE_OPERAND (cref1, 0);
+ return fold_build2 (code, type,
+ build_fold_addr_expr (op0),
+ build_fold_addr_expr (op1));
+ }
+ }
+
/* If this is a comparison of complex values and either or both sides
are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
arg0 = save_expr (arg0);
arg1 = save_expr (arg1);
- real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
- imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
- real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
- imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
+ real0 = fold_build1 (REALPART_EXPR, subtype, arg0);
+ imag0 = fold_build1 (IMAGPART_EXPR, subtype, arg0);
+ real1 = fold_build1 (REALPART_EXPR, subtype, arg1);
+ imag1 = fold_build1 (IMAGPART_EXPR, subtype, arg1);
- return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
- : TRUTH_ORIF_EXPR),
- type,
- fold (build2 (code, type, real0, real1)),
- fold (build2 (code, type, imag0, imag1))));
+ return fold_build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
+ : TRUTH_ORIF_EXPR),
+ type,
+ fold_build2 (code, type, real0, real1),
+ fold_build2 (code, type, imag0, imag1));
}
/* Optimize comparisons of strlen vs zero to a compare of the
&& (arglist = TREE_OPERAND (arg0, 1))
&& TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
&& ! TREE_CHAIN (arglist))
- return fold (build2 (code, type,
- build1 (INDIRECT_REF, char_type_node,
- TREE_VALUE (arglist)),
- fold_convert (char_type_node,
- integer_zero_node)));
+ {
+ tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
+ return fold_build2 (code, type, iref,
+ build_int_cst (TREE_TYPE (iref), 0));
+ }
}
/* We can fold X/C1 op C2 where C1 and C2 are integer constants
return constant_boolean_node (code==NE_EXPR, type);
t1 = fold_relational_const (code, type, arg0, arg1);
- return t1 == NULL_TREE ? t : t1;
+ return t1 == NULL_TREE ? NULL_TREE : t1;
case UNORDERED_EXPR:
case ORDERED_EXPR:
newtype = TREE_TYPE (targ1);
if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
- return fold (build2 (code, type, fold_convert (newtype, targ0),
- fold_convert (newtype, targ1)));
+ return fold_build2 (code, type, fold_convert (newtype, targ0),
+ fold_convert (newtype, targ1));
}
- return t;
+ return NULL_TREE;
case COMPOUND_EXPR:
/* When pedantic, a compound expression can be neither an lvalue
nor an integer constant expression. */
if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
- return t;
+ return NULL_TREE;
/* Don't let (0, 0) be null pointer constant. */
tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
: fold_convert (type, arg1);
case COMPLEX_EXPR:
if (wins)
return build_complex (type, arg0, arg1);
- return t;
+ return NULL_TREE;
+
+ case ASSERT_EXPR:
+ /* An ASSERT_EXPR should never be passed to fold_binary. */
+ gcc_unreachable ();
default:
- return t;
+ return NULL_TREE;
} /* switch (code) */
}
-/* Fold a ternary expression EXPR. Return the folded expression if
- folding is successful. Otherwise, return the original
- expression. */
+/* Callback for walk_tree, looking for LABEL_EXPR.
+ Returns *TP if it is a LABEL_EXPR, otherwise NULL_TREE.
+ Does not descend into the sub-tree of a GOTO_EXPR: a label that is
+ only the target of a goto inside the sub-tree is not accessible
+ from outside it. */
static tree
-fold_ternary (tree expr)
+contains_label_1 (tree *tp,
+ int *walk_subtrees,
+ void *data ATTRIBUTE_UNUSED)
+{
+ switch (TREE_CODE (*tp))
+ {
+ case LABEL_EXPR:
+ return *tp;
+ case GOTO_EXPR:
+ *walk_subtrees = 0;
+ /* Fall through: a GOTO_EXPR itself also yields NULL_TREE. */
+ default:
+ return NULL_TREE;
+ }
+}
+
+/* Checks whether the sub-tree ST contains a label (LABEL_EXPR) which is
+ accessible from outside the sub-tree. Returns true if such a label
+ is found, false otherwise. */
+
+static bool
+contains_label_p (tree st)
+{
+ return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
+}
+
+/* Fold a ternary expression of code CODE and type TYPE with operands
+ OP0, OP1, and OP2. Return the folded expression if folding is
+ successful. Otherwise, return NULL_TREE. */
+
+tree
+fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
- const tree t = expr;
- const tree type = TREE_TYPE (expr);
tree tem;
- tree op0, op1, op2;
tree arg0 = NULL_TREE, arg1 = NULL_TREE;
- enum tree_code code = TREE_CODE (t);
enum tree_code_class kind = TREE_CODE_CLASS (code);
gcc_assert (IS_EXPR_CODE_CLASS (kind)
&& TREE_CODE_LENGTH (code) == 3);
- op0 = TREE_OPERAND (t, 0);
- op1 = TREE_OPERAND (t, 1);
- op2 = TREE_OPERAND (t, 2);
-
/* Strip any conversions that don't change the mode. This is safe
for every expression, except for a comparison expression because
its signedness is derived from its operands. So, in the latter
if (m)
return TREE_VALUE (m);
}
- return t;
+ return NULL_TREE;
case COND_EXPR:
/* Pedantic ANSI C says that a conditional expression is never an lvalue,
so all simple results must be passed through pedantic_non_lvalue. */
if (TREE_CODE (arg0) == INTEGER_CST)
{
+ tree unused_op = integer_zerop (arg0) ? op1 : op2;
tem = integer_zerop (arg0) ? op2 : op1;
/* Only optimize constant conditions when the selected branch
has the same type as the COND_EXPR. This avoids optimizing
- away "c ? x : throw", where the throw has a void type. */
- if (! VOID_TYPE_P (TREE_TYPE (tem))
- || VOID_TYPE_P (type))
+ away "c ? x : throw", where the throw has a void type.
+ Avoid throwing away that operand which contains label. */
+ if ((!TREE_SIDE_EFFECTS (unused_op)
+ || !contains_label_p (unused_op))
+ && (! VOID_TYPE_P (TREE_TYPE (tem))
+ || VOID_TYPE_P (type)))
return pedantic_non_lvalue (tem);
- return t;
+ return NULL_TREE;
}
if (operand_equal_p (arg1, op2, 0))
return pedantic_omit_one_operand (type, arg1, arg0);
tem = invert_truthvalue (arg0);
if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
- return fold (build3 (code, type, tem, op2, op1));
+ return fold_build3 (code, type, tem, op2, op1);
}
/* Convert A ? 1 : 0 to simply A. */
&& integer_zerop (TREE_OPERAND (arg0, 1))
&& integer_zerop (op2)
&& (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
- return fold_convert (type, fold (build2 (BIT_AND_EXPR,
- TREE_TYPE (tem), tem, arg1)));
+ return fold_convert (type, fold_build2 (BIT_AND_EXPR,
+ TREE_TYPE (tem), tem, arg1));
/* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
already handled above. */
&& TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
&& (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
- return fold (build2 (BIT_AND_EXPR, type,
- TREE_OPERAND (tem, 0), arg1));
+ return fold_build2 (BIT_AND_EXPR, type,
+ TREE_OPERAND (tem, 0), arg1);
}
/* A & N ? N : 0 is simply A & N if N is a power of two. This
if (integer_zerop (op2)
&& truth_value_p (TREE_CODE (arg0))
&& truth_value_p (TREE_CODE (arg1)))
- return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
+ return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
/* Convert A ? B : 1 into !A || B if A and B are truth values. */
if (integer_onep (op2)
/* Only perform transformation if ARG0 is easily inverted. */
tem = invert_truthvalue (arg0);
if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
- return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
+ return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
}
/* Convert A ? 0 : B into !A && B if A and B are truth values. */
/* Only perform transformation if ARG0 is easily inverted. */
tem = invert_truthvalue (arg0);
if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
- return fold (build2 (TRUTH_ANDIF_EXPR, type, tem, op2));
+ return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
}
/* Convert A ? 1 : B into A || B if A and B are truth values. */
if (integer_onep (arg1)
&& truth_value_p (TREE_CODE (arg0))
&& truth_value_p (TREE_CODE (op2)))
- return fold (build2 (TRUTH_ORIF_EXPR, type, arg0, op2));
+ return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
- return t;
+ return NULL_TREE;
case CALL_EXPR:
/* Check for a built-in function. */
&& TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
&& DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
{
- tree tmp = fold_builtin (t, false);
+ tree fndecl = TREE_OPERAND (op0, 0);
+ tree arglist = op1;
+ tree tmp = fold_builtin (fndecl, arglist, false);
if (tmp)
return tmp;
}
- return t;
+ return NULL_TREE;
+
+ case BIT_FIELD_REF:
+ if (TREE_CODE (arg0) == VECTOR_CST
+ && type == TREE_TYPE (TREE_TYPE (arg0))
+ && host_integerp (arg1, 1)
+ && host_integerp (op2, 1))
+ {
+ unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
+ unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
+
+ if (width != 0
+ && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
+ && (idx % width) == 0
+ && (idx = idx / width)
+ < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
+ {
+ tree elements = TREE_VECTOR_CST_ELTS (arg0);
+ while (idx-- > 0)
+ elements = TREE_CHAIN (elements);
+ return TREE_VALUE (elements);
+ }
+ }
+ return NULL_TREE;
default:
- return t;
+ return NULL_TREE;
} /* switch (code) */
}
const tree t = expr;
enum tree_code code = TREE_CODE (t);
enum tree_code_class kind = TREE_CODE_CLASS (code);
+ tree tem;
/* Return right away if a constant. */
if (kind == tcc_constant)
if (IS_EXPR_CODE_CLASS (kind))
{
+ tree type = TREE_TYPE (t);
+ tree op0, op1, op2;
+
switch (TREE_CODE_LENGTH (code))
{
case 1:
- return fold_unary (expr);
+ op0 = TREE_OPERAND (t, 0);
+ tem = fold_unary (code, type, op0);
+ return tem ? tem : expr;
case 2:
- return fold_binary (expr);
+ op0 = TREE_OPERAND (t, 0);
+ op1 = TREE_OPERAND (t, 1);
+ tem = fold_binary (code, type, op0, op1);
+ return tem ? tem : expr;
case 3:
- return fold_ternary (expr);
+ op0 = TREE_OPERAND (t, 0);
+ op1 = TREE_OPERAND (t, 1);
+ op2 = TREE_OPERAND (t, 2);
+ tem = fold_ternary (code, type, op0, op1, op2);
+ return tem ? tem : expr;
default:
break;
}
enum tree_code code;
char buf[sizeof (struct tree_decl)];
int i, len;
+
+recursive_label:
gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
<= sizeof (struct tree_decl))
}
else if (TREE_CODE_CLASS (code) == tcc_type
&& (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
- || TYPE_CACHED_VALUES_P (expr)))
+ || TYPE_CACHED_VALUES_P (expr)
+ || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
{
/* Allow these fields to be modified. */
memcpy (buf, expr, tree_size (expr));
expr = (tree) buf;
+ TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
TYPE_POINTER_TO (expr) = NULL;
TYPE_REFERENCE_TO (expr) = NULL;
- TYPE_CACHED_VALUES_P (expr) = 0;
- TYPE_CACHED_VALUES (expr) = NULL;
+ if (TYPE_CACHED_VALUES_P (expr))
+ {
+ TYPE_CACHED_VALUES_P (expr) = 0;
+ TYPE_CACHED_VALUES (expr) = NULL;
+ }
}
md5_process_bytes (expr, tree_size (expr), ctx);
fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
if (TREE_CODE_CLASS (code) != tcc_type
- && TREE_CODE_CLASS (code) != tcc_declaration)
+ && TREE_CODE_CLASS (code) != tcc_declaration
+ && code != TREE_LIST)
fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
switch (TREE_CODE_CLASS (code))
{
case TREE_LIST:
fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
+ expr = TREE_CHAIN (expr);
+ goto recursive_label;
break;
case TREE_VEC:
for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
#endif
+/* Fold a unary tree expression with code CODE of type TYPE with an
+ operand OP0. Return the folded expression if folding succeeds.
+ Otherwise, build and return a new tree expression with code CODE
+ of type TYPE and operand OP0. */
+
+tree
+fold_build1 (enum tree_code code, tree type, tree op0)
+{
+ tree tem = fold_unary (code, type, op0);
+ if (tem)
+ return tem;
+
+ return build1 (code, type, op0);
+}
+
+/* Fold a binary tree expression with code CODE of type TYPE with
+ operands OP0 and OP1. Return the folded expression if folding
+ succeeds. Otherwise, build and return a new tree expression with
+ code CODE of type TYPE and operands OP0 and OP1. */
+
+tree
+fold_build2 (enum tree_code code, tree type, tree op0, tree op1)
+{
+ tree tem = fold_binary (code, type, op0, op1);
+ if (tem)
+ return tem;
+
+ return build2 (code, type, op0, op1);
+}
+
+/* Fold a ternary tree expression with code CODE of type TYPE with
+ operands OP0, OP1, and OP2. Return the folded expression if
+ folding succeeds. Otherwise, build and return a new tree
+ expression with code CODE of type TYPE and operands OP0, OP1,
+ and OP2. */
+
+tree
+fold_build3 (enum tree_code code, tree type, tree op0, tree op1, tree op2)
+{
+ tree tem = fold_ternary (code, type, op0, op1, op2);
+ if (tem)
+ return tem;
+
+ return build3 (code, type, op0, op1, op2);
+}
+
/* Perform constant folding and related simplification of initializer
expression EXPR. This behaves identically to "fold" but ignores
potential run-time traps and exceptions that fold must preserve. */
CASE_BUILTIN_F (BUILT_IN_EXPM1)
CASE_BUILTIN_F (BUILT_IN_FLOOR)
CASE_BUILTIN_F (BUILT_IN_FMOD)
+ CASE_BUILTIN_F (BUILT_IN_LCEIL)
CASE_BUILTIN_F (BUILT_IN_LDEXP)
+ CASE_BUILTIN_F (BUILT_IN_LFLOOR)
+ CASE_BUILTIN_F (BUILT_IN_LLCEIL)
+ CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
CASE_BUILTIN_F (BUILT_IN_LLRINT)
CASE_BUILTIN_F (BUILT_IN_LLROUND)
CASE_BUILTIN_F (BUILT_IN_LRINT)
return false;
}
-/* See if we are applying CODE, a relational to the highest or lowest
- possible integer of TYPE. If so, then the result is a compile
- time constant. */
-
-static tree
-fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
- tree *op1_p)
-{
- tree op0 = *op0_p;
- tree op1 = *op1_p;
- enum tree_code code = *code_p;
- int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
-
- if (TREE_CODE (op1) == INTEGER_CST
- && ! TREE_CONSTANT_OVERFLOW (op1)
- && width <= HOST_BITS_PER_WIDE_INT
- && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
- || POINTER_TYPE_P (TREE_TYPE (op1))))
- {
- unsigned HOST_WIDE_INT signed_max;
- unsigned HOST_WIDE_INT max, min;
-
- signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
-
- if (TYPE_UNSIGNED (TREE_TYPE (op1)))
- {
- max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
- min = 0;
- }
- else
- {
- max = signed_max;
- min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
- }
-
- if (TREE_INT_CST_HIGH (op1) == 0
- && TREE_INT_CST_LOW (op1) == max)
- switch (code)
- {
- case GT_EXPR:
- return omit_one_operand (type, integer_zero_node, op0);
-
- case GE_EXPR:
- *code_p = EQ_EXPR;
- break;
- case LE_EXPR:
- return omit_one_operand (type, integer_one_node, op0);
-
- case LT_EXPR:
- *code_p = NE_EXPR;
- break;
-
- /* The GE_EXPR and LT_EXPR cases above are not normally
- reached because of previous transformations. */
-
- default:
- break;
- }
- else if (TREE_INT_CST_HIGH (op1) == 0
- && TREE_INT_CST_LOW (op1) == max - 1)
- switch (code)
- {
- case GT_EXPR:
- *code_p = EQ_EXPR;
- *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
- break;
- case LE_EXPR:
- *code_p = NE_EXPR;
- *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
- break;
- default:
- break;
- }
- else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
- && TREE_INT_CST_LOW (op1) == min)
- switch (code)
- {
- case LT_EXPR:
- return omit_one_operand (type, integer_zero_node, op0);
-
- case LE_EXPR:
- *code_p = EQ_EXPR;
- break;
-
- case GE_EXPR:
- return omit_one_operand (type, integer_one_node, op0);
-
- case GT_EXPR:
- *code_p = NE_EXPR;
- break;
-
- default:
- break;
- }
- else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
- && TREE_INT_CST_LOW (op1) == min + 1)
- switch (code)
- {
- case GE_EXPR:
- *code_p = NE_EXPR;
- *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
- break;
- case LT_EXPR:
- *code_p = EQ_EXPR;
- *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
- break;
- default:
- break;
- }
-
- else if (TREE_INT_CST_HIGH (op1) == 0
- && TREE_INT_CST_LOW (op1) == signed_max
- && TYPE_UNSIGNED (TREE_TYPE (op1))
- /* signed_type does not work on pointer types. */
- && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
- {
- /* The following case also applies to X < signed_max+1
- and X >= signed_max+1 because previous transformations. */
- if (code == LE_EXPR || code == GT_EXPR)
- {
- tree st0, st1, exp, retval;
- st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
- st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
-
- exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
- type,
- fold_convert (st0, op0),
- fold_convert (st1, integer_zero_node));
-
- retval = fold_binary_to_constant (TREE_CODE (exp),
- TREE_TYPE (exp),
- TREE_OPERAND (exp, 0),
- TREE_OPERAND (exp, 1));
-
- /* If we are in gimple form, then returning EXP would create
- non-gimple expressions. Clearing it is safe and insures
- we do not allow a non-gimple expression to escape. */
- if (in_gimple_form)
- exp = NULL;
-
- return (retval ? retval : exp);
- }
- }
- }
-
- return NULL_TREE;
-}
-
-
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
attempt to fold the expression to a constant without modifying TYPE,
OP0 or OP1.
If the expression could be simplified to a constant, then return
the constant. If the expression would not be simplified to a
- constant, then return NULL_TREE.
-
- Note this is primarily designed to be called after gimplification
- of the tree structures and when at least one operand is a constant.
- As a result of those simplifying assumptions this routine is far
- simpler than the generic fold routine. */
+ constant, then return NULL_TREE. */
tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
- int wins = 1;
- tree subop0;
- tree subop1;
- tree tem;
-
- /* If this is a commutative operation, and ARG0 is a constant, move it
- to ARG1 to reduce the number of tests below. */
- if (commutative_tree_code (code)
- && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
- {
- tem = op0;
- op0 = op1;
- op1 = tem;
- }
-
- /* If either operand is a complex type, extract its real component. */
- if (TREE_CODE (op0) == COMPLEX_CST)
- subop0 = TREE_REALPART (op0);
- else
- subop0 = op0;
-
- if (TREE_CODE (op1) == COMPLEX_CST)
- subop1 = TREE_REALPART (op1);
- else
- subop1 = op1;
-
- /* Note if either argument is not a real or integer constant.
- With a few exceptions, simplification is limited to cases
- where both arguments are constants. */
- if ((TREE_CODE (subop0) != INTEGER_CST
- && TREE_CODE (subop0) != REAL_CST)
- || (TREE_CODE (subop1) != INTEGER_CST
- && TREE_CODE (subop1) != REAL_CST))
- wins = 0;
-
- switch (code)
- {
- case PLUS_EXPR:
- /* (plus (address) (const_int)) is a constant. */
- if (TREE_CODE (op0) == PLUS_EXPR
- && TREE_CODE (op1) == INTEGER_CST
- && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
- || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
- && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
- == ADDR_EXPR)))
- && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
- {
- return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
- const_binop (PLUS_EXPR, op1,
- TREE_OPERAND (op0, 1), 0));
- }
- case BIT_XOR_EXPR:
-
- binary:
- if (!wins)
- return NULL_TREE;
-
- /* Both arguments are constants. Simplify. */
- tem = const_binop (code, op0, op1, 0);
- if (tem != NULL_TREE)
- {
- /* The return value should always have the same type as
- the original expression. */
- if (TREE_TYPE (tem) != type)
- tem = fold_convert (type, tem);
-
- return tem;
- }
- return NULL_TREE;
-
- case MINUS_EXPR:
- /* Fold &x - &x. This can happen from &x.foo - &x.
- This is unsafe for certain floats even in non-IEEE formats.
- In IEEE, it is unsafe because it does wrong for NaNs.
- Also note that operand_equal_p is always false if an
- operand is volatile. */
- if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
- return fold_convert (type, integer_zero_node);
-
- goto binary;
-
- case MULT_EXPR:
- case BIT_AND_EXPR:
- /* Special case multiplication or bitwise AND where one argument
- is zero. */
- if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
- return omit_one_operand (type, op1, op0);
- else
- if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
- && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
- && real_zerop (op1))
- return omit_one_operand (type, op1, op0);
-
- goto binary;
-
- case BIT_IOR_EXPR:
- /* Special case when we know the result will be all ones. */
- if (integer_all_onesp (op1))
- return omit_one_operand (type, op1, op0);
-
- goto binary;
-
- case TRUNC_DIV_EXPR:
- case ROUND_DIV_EXPR:
- case FLOOR_DIV_EXPR:
- case CEIL_DIV_EXPR:
- case EXACT_DIV_EXPR:
- case TRUNC_MOD_EXPR:
- case ROUND_MOD_EXPR:
- case FLOOR_MOD_EXPR:
- case CEIL_MOD_EXPR:
- case RDIV_EXPR:
- /* Division by zero is undefined. */
- if (integer_zerop (op1))
- return NULL_TREE;
-
- if (TREE_CODE (op1) == REAL_CST
- && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
- && real_zerop (op1))
- return NULL_TREE;
-
- goto binary;
-
- case MIN_EXPR:
- if (INTEGRAL_TYPE_P (type)
- && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
- return omit_one_operand (type, op1, op0);
-
- goto binary;
-
- case MAX_EXPR:
- if (INTEGRAL_TYPE_P (type)
- && TYPE_MAX_VALUE (type)
- && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
- return omit_one_operand (type, op1, op0);
-
- goto binary;
-
- case RSHIFT_EXPR:
- /* Optimize -1 >> x for arithmetic right shifts. */
- if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
- return omit_one_operand (type, op0, op1);
- /* ... fall through ... */
-
- case LSHIFT_EXPR:
- if (integer_zerop (op0))
- return omit_one_operand (type, op0, op1);
-
- /* Since negative shift count is not well-defined, don't
- try to compute it in the compiler. */
- if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
- return NULL_TREE;
-
- goto binary;
-
- case LROTATE_EXPR:
- case RROTATE_EXPR:
- /* -1 rotated either direction by any amount is still -1. */
- if (integer_all_onesp (op0))
- return omit_one_operand (type, op0, op1);
-
- /* 0 rotated either direction by any amount is still zero. */
- if (integer_zerop (op0))
- return omit_one_operand (type, op0, op1);
-
- goto binary;
-
- case COMPLEX_EXPR:
- if (wins)
- return build_complex (type, op0, op1);
- return NULL_TREE;
-
- case LT_EXPR:
- case LE_EXPR:
- case GT_EXPR:
- case GE_EXPR:
- case EQ_EXPR:
- case NE_EXPR:
- /* If one arg is a real or integer constant, put it last. */
- if ((TREE_CODE (op0) == INTEGER_CST
- && TREE_CODE (op1) != INTEGER_CST)
- || (TREE_CODE (op0) == REAL_CST
- && TREE_CODE (op0) != REAL_CST))
- {
- tree temp;
-
- temp = op0;
- op0 = op1;
- op1 = temp;
- code = swap_tree_comparison (code);
- }
-
- /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
- This transformation affects the cases which are handled in later
- optimizations involving comparisons with non-negative constants. */
- if (TREE_CODE (op1) == INTEGER_CST
- && TREE_CODE (op0) != INTEGER_CST
- && tree_int_cst_sgn (op1) > 0)
- {
- switch (code)
- {
- case GE_EXPR:
- code = GT_EXPR;
- op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
- break;
-
- case LT_EXPR:
- code = LE_EXPR;
- op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
- break;
-
- default:
- break;
- }
- }
-
- tem = fold_relational_hi_lo (&code, type, &op0, &op1);
- if (tem)
- return tem;
-
- /* Fall through. */
-
- case ORDERED_EXPR:
- case UNORDERED_EXPR:
- case UNLT_EXPR:
- case UNLE_EXPR:
- case UNGT_EXPR:
- case UNGE_EXPR:
- case UNEQ_EXPR:
- case LTGT_EXPR:
- if (!wins)
- return NULL_TREE;
-
- return fold_relational_const (code, type, op0, op1);
-
- case RANGE_EXPR:
- /* This could probably be handled. */
- return NULL_TREE;
-
- case TRUTH_AND_EXPR:
- /* If second arg is constant zero, result is zero, but first arg
- must be evaluated. */
- if (integer_zerop (op1))
- return omit_one_operand (type, op1, op0);
- /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
- case will be handled here. */
- if (integer_zerop (op0))
- return omit_one_operand (type, op0, op1);
- if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
- return constant_boolean_node (true, type);
- return NULL_TREE;
-
- case TRUTH_OR_EXPR:
- /* If second arg is constant true, result is true, but we must
- evaluate first arg. */
- if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
- return omit_one_operand (type, op1, op0);
- /* Likewise for first arg, but note this only occurs here for
- TRUTH_OR_EXPR. */
- if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
- return omit_one_operand (type, op0, op1);
- if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
- return constant_boolean_node (false, type);
- return NULL_TREE;
-
- case TRUTH_XOR_EXPR:
- if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
- {
- int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
- return constant_boolean_node (x, type);
- }
- return NULL_TREE;
-
- default:
- return NULL_TREE;
- }
+ /* Defer to the generic binary folder; the result is only useful
+ here if it simplified all the way down to a constant. */
+ tree tem = fold_binary (code, type, op0, op1);
+ return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* Given the components of a unary expression CODE, TYPE and OP0,
If the expression could be simplified to a constant, then return
the constant. If the expression would not be simplified to a
- constant, then return NULL_TREE.
-
- Note this is primarily designed to be called after gimplification
- of the tree structures and when op0 is a constant. As a result
- of those simplifying assumptions this routine is far simpler than
- the generic fold routine. */
+ constant, then return NULL_TREE. */
tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
- /* Make sure we have a suitable constant argument. */
- if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
- {
- tree subop;
-
- if (TREE_CODE (op0) == COMPLEX_CST)
- subop = TREE_REALPART (op0);
- else
- subop = op0;
-
- if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
- return NULL_TREE;
- }
-
- switch (code)
- {
- case NOP_EXPR:
- case FLOAT_EXPR:
- case CONVERT_EXPR:
- case FIX_TRUNC_EXPR:
- case FIX_FLOOR_EXPR:
- case FIX_CEIL_EXPR:
- return fold_convert_const (code, type, op0);
-
- case NEGATE_EXPR:
- if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
- return fold_negate_const (op0, type);
- else
- return NULL_TREE;
-
- case ABS_EXPR:
- if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
- return fold_abs_const (op0, type);
- else
- return NULL_TREE;
-
- case BIT_NOT_EXPR:
- if (TREE_CODE (op0) == INTEGER_CST)
- return fold_not_const (op0, type);
- else
- return NULL_TREE;
-
- case REALPART_EXPR:
- if (TREE_CODE (op0) == COMPLEX_CST)
- return TREE_REALPART (op0);
- else
- return NULL_TREE;
-
- case IMAGPART_EXPR:
- if (TREE_CODE (op0) == COMPLEX_CST)
- return TREE_IMAGPART (op0);
- else
- return NULL_TREE;
-
- case CONJ_EXPR:
- if (TREE_CODE (op0) == COMPLEX_CST
- && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
- return build_complex (type, TREE_REALPART (op0),
- negate_expr (TREE_IMAGPART (op0)));
- return NULL_TREE;
-
- default:
- return NULL_TREE;
- }
+ /* Defer to the generic unary folder; keep the result only when it
+ actually folded to a constant. */
+ tree tem = fold_unary (code, type, op0);
+ return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
-/* Given a pointer value T, return a simplified version of an indirection
- through T, or NULL_TREE if no simplification is possible. */
+/* Given a pointer value OP0 and a type TYPE, return a simplified version
+ of an indirection through OP0, or NULL_TREE if no simplification is
+ possible. TYPE is the type the resulting indirection should have;
+ it is compared against the pointed-to types by pointer equality of
+ the type nodes, not through any language-specific compatibility
+ hook, so callers must pass the precise type node. */
-static tree
-fold_indirect_ref_1 (tree t)
+tree
+fold_indirect_ref_1 (tree type, tree op0)
{
- tree type = TREE_TYPE (TREE_TYPE (t));
- tree sub = t;
+ tree sub = op0;
tree subtype;
STRIP_NOPS (sub);
tree op = TREE_OPERAND (sub, 0);
tree optype = TREE_TYPE (op);
/* *&p => p */
- if (lang_hooks.types_compatible_p (type, optype))
+ if (type == optype)
return op;
/* *(foo *)&fooarray => fooarray[0] */
else if (TREE_CODE (optype) == ARRAY_TYPE
- && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
+ && type == TREE_TYPE (optype))
{
tree type_domain = TYPE_DOMAIN (optype);
tree min_val = size_zero_node;
/* *(foo *)fooarrptr => (*fooarrptr)[0] */
if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
- && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
+ && type == TREE_TYPE (TREE_TYPE (subtype)))
{
tree type_domain;
tree min_val = size_zero_node;
tree
build_fold_indirect_ref (tree t)
{
- tree sub = fold_indirect_ref_1 (t);
+ /* Compute the type of *T once; it is shared by the folding attempt
+ and by the INDIRECT_REF built when folding fails. */
+ tree type = TREE_TYPE (TREE_TYPE (t));
+ tree sub = fold_indirect_ref_1 (type, t);
if (sub)
return sub;
else
- return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (t)), t);
+ return build1 (INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version. */
tree
fold_indirect_ref (tree t)
{
- tree sub = fold_indirect_ref_1 (TREE_OPERAND (t, 0));
+ tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
if (sub)
return sub;
if (type != TREE_TYPE (toffset2))
toffset2 = fold_convert (type, toffset2);
- tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
+ tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
if (!host_integerp (tdiff, 0))
return false;
arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
if (arg0 != NULL_TREE || arg1 != NULL_TREE)
- return fold (build2 (TREE_CODE (exp), TREE_TYPE (exp),
- arg0 ? arg0 : TREE_OPERAND (exp, 0),
- arg1 ? arg1 : TREE_OPERAND (exp, 1)));
+ return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
+ arg0 ? arg0 : TREE_OPERAND (exp, 0),
+ arg1 ? arg1 : TREE_OPERAND (exp, 1));
break;
default: