/* Combine two integer constants ARG1 and ARG2 under operation CODE
to produce a new constant. Return NULL_TREE if we don't know how
- to evaluate CODE at compile-time.
-
- If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
+ to evaluate CODE at compile-time. */
tree
-int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
+int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
double_int op1, op2, res, tmp;
tree t;
return NULL_TREE;
}
- if (notrunc)
- {
- t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);
-
- /* Propagate overflow flags ourselves. */
- if (((!uns || is_sizetype) && overflow)
- | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
- {
- t = copy_node (t);
- TREE_OVERFLOW (t) = 1;
- }
- }
- else
- t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
- ((!uns || is_sizetype) && overflow)
- | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
+ t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
+ ((!uns || is_sizetype) && overflow)
+ | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
return t;
}
STRIP_NOPS (arg2);
if (TREE_CODE (arg1) == INTEGER_CST)
- return int_const_binop (code, arg1, arg2, 0);
+ return int_const_binop (code, arg1, arg2);
if (TREE_CODE (arg1) == REAL_CST)
{
}
/* Handle general case of two integer constants. */
- return int_const_binop (code, arg0, arg1, 0);
+ return int_const_binop (code, arg0, arg1);
}
return fold_build2_loc (loc, code, type, arg0, arg1);
&& flag_strict_volatile_bitfields > 0)
nmode = lmode;
else
- nmode = get_best_mode (lbitsize, lbitpos,
+ nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
const_p ? TYPE_ALIGN (TREE_TYPE (linner))
: MIN (TYPE_ALIGN (TREE_TYPE (linner)),
TYPE_ALIGN (TREE_TYPE (rinner))),
low = fold_convert_loc (loc, sizetype, low);
low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
return build_range_check (loc, type,
- fold_build2_loc (loc, POINTER_PLUS_EXPR,
- etype, exp, low),
+ fold_build_pointer_plus_loc (loc, exp, low),
1, build_int_cst (etype, 0), value);
}
return 0;
if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
&& merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
in1_p, low1, high1)
- && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
+ && 0 != (tem = (build_range_check (loc, type,
lhs != 0 ? lhs
: rhs != 0 ? rhs : integer_zero_node,
in_p, low, high))))
? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
type, op0, op1);
- else if (lang_hooks.decls.global_bindings_p () == 0
- && ! CONTAINS_PLACEHOLDER_P (lhs))
+ else if (!lang_hooks.decls.global_bindings_p ()
+ && !CONTAINS_PLACEHOLDER_P (lhs))
{
tree common = save_expr (lhs);
to be relative to a field of that size. */
first_bit = MIN (ll_bitpos, rl_bitpos);
end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
- lnmode = get_best_mode (end_bit - first_bit, first_bit,
+ lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
volatilep);
if (lnmode == VOIDmode)
first_bit = MIN (lr_bitpos, rr_bitpos);
end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
- rnmode = get_best_mode (end_bit - first_bit, first_bit,
+ rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
volatilep);
if (rnmode == VOIDmode)
/* If these are the same operation types, we can associate them
assuming no overflow. */
- if (tcode == code
- && 0 != (t1 = int_const_binop (MULT_EXPR,
- fold_convert (ctype, op1),
- fold_convert (ctype, c), 1))
- && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1),
- (TYPE_UNSIGNED (ctype)
- && tcode != MULT_EXPR) ? -1 : 1,
- TREE_OVERFLOW (t1)))
- && !TREE_OVERFLOW (t1))
- return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
+ if (tcode == code)
+ {
+ double_int mul;
+ int overflow_p;
+ mul = double_int_mul_with_sign
+ (double_int_ext
+ (tree_to_double_int (op1),
+ TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
+ double_int_ext
+ (tree_to_double_int (c),
+ TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
+ false, &overflow_p);
+ overflow_p = (((!TYPE_UNSIGNED (ctype)
+ || (TREE_CODE (ctype) == INTEGER_TYPE
+ && TYPE_IS_SIZETYPE (ctype)))
+ && overflow_p)
+ | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
+ if (!double_int_fits_to_tree_p (ctype, mul)
+ && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
+ || !TYPE_UNSIGNED (ctype)
+ || (TREE_CODE (ctype) == INTEGER_TYPE
+ && TYPE_IS_SIZETYPE (ctype))))
+ overflow_p = 1;
+ if (!overflow_p)
+ return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
+ double_int_to_tree (ctype, mul));
+ }
/* If these operations "cancel" each other, we have the main
optimizations of this pass, which occur when either constant is a
build_real (TREE_TYPE (arg), dconst0));
/* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
- if (lang_hooks.decls.global_bindings_p () != 0
- || CONTAINS_PLACEHOLDER_P (arg))
- return NULL_TREE;
-
arg = save_expr (arg);
return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
fold_build2_loc (loc, GE_EXPR, type, arg,
build_real (TREE_TYPE (arg), c2));
/* sqrt(x) < c is the same as x >= 0 && x < c*c. */
- if (lang_hooks.decls.global_bindings_p () == 0
- && ! CONTAINS_PLACEHOLDER_P (arg))
- {
- arg = save_expr (arg);
- return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
+ arg = save_expr (arg);
+ return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
fold_build2_loc (loc, GE_EXPR, type, arg,
build_real (TREE_TYPE (arg),
dconst0)),
fold_build2_loc (loc, code, type, arg,
build_real (TREE_TYPE (arg),
c2)));
- }
}
}
return omit_one_operand_loc (loc, type, integer_one_node, arg0);
/* x <= +Inf is the same as x == x, i.e. isfinite(x). */
- if (lang_hooks.decls.global_bindings_p () == 0
- && ! CONTAINS_PLACEHOLDER_P (arg0))
- {
- arg0 = save_expr (arg0);
- return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
- }
- break;
+ arg0 = save_expr (arg0);
+ return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
case EQ_EXPR:
case GE_EXPR:
int overflow;
/* We have to do this the hard way to detect unsigned overflow.
- prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
+ prod = int_const_binop (MULT_EXPR, arg01, arg1); */
overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
TREE_INT_CST_HIGH (arg01),
TREE_INT_CST_LOW (arg1),
if (unsigned_p)
{
tmp = int_const_binop (MINUS_EXPR, arg01,
- build_int_cst (TREE_TYPE (arg01), 1), 0);
+ build_int_cst (TREE_TYPE (arg01), 1));
lo = prod;
- /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
+ /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
TREE_INT_CST_HIGH (prod),
TREE_INT_CST_LOW (tmp),
else if (tree_int_cst_sgn (arg01) >= 0)
{
tmp = int_const_binop (MINUS_EXPR, arg01,
- build_int_cst (TREE_TYPE (arg01), 1), 0);
+ build_int_cst (TREE_TYPE (arg01), 1));
switch (tree_int_cst_sgn (arg1))
{
case -1:
neg_overflow = true;
- lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
+ lo = int_const_binop (MINUS_EXPR, prod, tmp);
hi = prod;
break;
break;
case 1:
- hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
+ hi = int_const_binop (PLUS_EXPR, prod, tmp);
lo = prod;
break;
code = swap_tree_comparison (code);
tmp = int_const_binop (PLUS_EXPR, arg01,
- build_int_cst (TREE_TYPE (arg01), 1), 0);
+ build_int_cst (TREE_TYPE (arg01), 1));
switch (tree_int_cst_sgn (arg1))
{
case -1:
- hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
+ hi = int_const_binop (MINUS_EXPR, prod, tmp);
lo = prod;
break;
case 1:
neg_overflow = true;
- lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
+ lo = int_const_binop (PLUS_EXPR, prod, tmp);
hi = prod;
break;
if (arg0)
{
if (CONVERT_EXPR_CODE_P (code)
- || code == FLOAT_EXPR || code == ABS_EXPR)
+ || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
{
/* Don't use STRIP_NOPS, because signedness of argument type
matters. */
0)));
return tem;
}
- else if (COMPARISON_CLASS_P (arg0))
- {
- if (TREE_CODE (type) == BOOLEAN_TYPE)
- {
- arg0 = copy_node (arg0);
- TREE_TYPE (arg0) = type;
- return arg0;
- }
- else if (TREE_CODE (type) != INTEGER_TYPE)
- return fold_build3_loc (loc, COND_EXPR, type, arg0,
- fold_build1_loc (loc, code, type,
- integer_one_node),
- fold_build1_loc (loc, code, type,
- integer_zero_node));
- }
}
switch (code)
if (TREE_TYPE (op0) == type)
return op0;
- /* If we have (type) (a CMP b) and type is an integral type, return
- new expression involving the new type. */
- if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
- return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
- TREE_OPERAND (op0, 1));
+ if (COMPARISON_CLASS_P (op0))
+ {
+ /* If we have (type) (a CMP b) and type is an integral type, return
+ new expression involving the new type. Canonicalize
+ (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
+ non-integral type.
+ Do not fold the result as that would not simplify further, also
+ folding again results in recursions. */
+ if (TREE_CODE (type) == BOOLEAN_TYPE)
+ return build2_loc (loc, TREE_CODE (op0), type,
+ TREE_OPERAND (op0, 0),
+ TREE_OPERAND (op0, 1));
+ else if (!INTEGRAL_TYPE_P (type))
+ return build3_loc (loc, COND_EXPR, type, op0,
+ fold_convert (type, boolean_true_node),
+ fold_convert (type, boolean_false_node));
+ }
/* Handle cases of two conversions in a row. */
if (CONVERT_EXPR_P (op0))
return res;
}
+/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
+ operands OP0 and OP1. LOC is the location of the resulting expression.
+ ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
+ Return the folded expression if folding is successful. Otherwise,
+ return NULL_TREE. */
+static tree
+fold_truth_andor (location_t loc, enum tree_code code, tree type,
+ tree arg0, tree arg1, tree op0, tree op1)
+{
+ tree tem;
+
+ /* We only do these simplifications if we are optimizing. */
+ if (!optimize)
+ return NULL_TREE;
+
+ /* Check for things like (A || B) && (A || C). We can convert this
+ to A || (B && C). Note that either operator can be any of the four
+ truth and/or operations and the transformation will still be
+ valid. Also note that we only care about order for the
+ ANDIF and ORIF operators. If B contains side effects, this
+ might change the truth-value of A. */
+ if (TREE_CODE (arg0) == TREE_CODE (arg1)
+ && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
+ || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
+ || TREE_CODE (arg0) == TRUTH_AND_EXPR
+ || TREE_CODE (arg0) == TRUTH_OR_EXPR)
+ && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
+ {
+ tree a00 = TREE_OPERAND (arg0, 0);
+ tree a01 = TREE_OPERAND (arg0, 1);
+ tree a10 = TREE_OPERAND (arg1, 0);
+ tree a11 = TREE_OPERAND (arg1, 1);
+ int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
+ || TREE_CODE (arg0) == TRUTH_AND_EXPR)
+ && (code == TRUTH_AND_EXPR
+ || code == TRUTH_OR_EXPR));
+
+ if (operand_equal_p (a00, a10, 0))
+ return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
+ fold_build2_loc (loc, code, type, a01, a11));
+ else if (commutative && operand_equal_p (a00, a11, 0))
+ return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
+ fold_build2_loc (loc, code, type, a01, a10));
+ else if (commutative && operand_equal_p (a01, a10, 0))
+ return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
+ fold_build2_loc (loc, code, type, a00, a11));
+
+      /* This case is tricky because we must either have commutative
+ operators or else A10 must not have side-effects. */
+
+ else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
+ && operand_equal_p (a01, a11, 0))
+ return fold_build2_loc (loc, TREE_CODE (arg0), type,
+ fold_build2_loc (loc, code, type, a00, a10),
+ a01);
+ }
+
+ /* See if we can build a range comparison. */
+ if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
+ return tem;
+
+ if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
+ || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
+ {
+ tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
+ if (tem)
+ return fold_build2_loc (loc, code, type, tem, arg1);
+ }
+
+ if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
+ || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
+ {
+ tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
+ if (tem)
+ return fold_build2_loc (loc, code, type, arg0, tem);
+ }
+
+ /* Check for the possibility of merging component references. If our
+ lhs is another similar operation, try to merge its rhs with our
+ rhs. Then try to merge our lhs and rhs. */
+ if (TREE_CODE (arg0) == code
+ && 0 != (tem = fold_truthop (loc, code, type,
+ TREE_OPERAND (arg0, 1), arg1)))
+ return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
+
+ if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
+ return tem;
+
+ return NULL_TREE;
+}
+
/* Fold a binary expression of code CODE and type TYPE with operands
OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
Return the folded expression if folding is successful. Otherwise,
return NULL_TREE;
t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
- cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
+ cst0, build_int_cst (TREE_TYPE (cst0), 1));
if (code0 != INTEGER_CST)
t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
of lower absolute value than before. */
cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
? MINUS_EXPR : PLUS_EXPR,
- const2, const1, 0);
+ const2, const1);
if (!TREE_OVERFLOW (cst)
&& tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
{
cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
? MINUS_EXPR : PLUS_EXPR,
- const1, const2, 0);
+ const1, const2);
if (!TREE_OVERFLOW (cst)
&& tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
{
return fold_build2 (MEM_REF, type,
TREE_OPERAND (iref, 0),
int_const_binop (PLUS_EXPR, arg1,
- TREE_OPERAND (iref, 1), 0));
+ TREE_OPERAND (iref, 1)));
}
/* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
return fold_build2 (MEM_REF, type,
build_fold_addr_expr (base),
int_const_binop (PLUS_EXPR, arg1,
- size_int (coffset), 0));
+ size_int (coffset)));
}
return NULL_TREE;
fold_convert_loc (loc, sizetype,
arg0)));
- /* index +p PTR -> PTR +p index */
- if (POINTER_TYPE_P (TREE_TYPE (arg1))
- && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
- return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
- fold_convert_loc (loc, type, arg1),
- fold_convert_loc (loc, sizetype, arg0));
-
/* (PTR +p B) +p A -> PTR +p (B + A) */
if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
{
inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
arg01, fold_convert_loc (loc, sizetype, arg1));
return fold_convert_loc (loc, type,
- fold_build2_loc (loc, POINTER_PLUS_EXPR,
- TREE_TYPE (arg00),
- arg00, inner));
+ fold_build_pointer_plus_loc (loc,
+ arg00, inner));
}
/* PTR_CST +p CST -> CST1 */
}
/* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
- if (optimize_function_for_speed_p (cfun)
+ if (!in_gimple_form
+ && optimize_function_for_speed_p (cfun)
&& operand_equal_p (arg0, arg1, 0))
{
tree powfn = mathfn_built_in (type, BUILT_IN_POW);
if (operand_equal_p (arg0, arg1, 0))
return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
- /* ~X & X is always zero. */
- if (TREE_CODE (arg0) == BIT_NOT_EXPR
+ /* ~X & X, (X == 0) & X, and !X & X are always zero. */
+ if ((TREE_CODE (arg0) == BIT_NOT_EXPR
+ || TREE_CODE (arg0) == TRUTH_NOT_EXPR
+ || (TREE_CODE (arg0) == EQ_EXPR
+ && integer_zerop (TREE_OPERAND (arg0, 1))))
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
- /* X & ~X is always zero. */
- if (TREE_CODE (arg1) == BIT_NOT_EXPR
+ /* X & ~X , X & (X == 0), and X & !X are always zero. */
+ if ((TREE_CODE (arg1) == BIT_NOT_EXPR
+ || TREE_CODE (arg1) == TRUTH_NOT_EXPR
+ || (TREE_CODE (arg1) == EQ_EXPR
+ && integer_zerop (TREE_OPERAND (arg1, 1))))
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
build_int_cst (TREE_TYPE (tem), 1)),
build_int_cst (TREE_TYPE (tem), 0));
}
+ /* Fold !X & 1 as X == 0. */
+ if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
+ && integer_onep (arg1))
+ {
+ tem = TREE_OPERAND (arg0, 0);
+ return fold_build2_loc (loc, EQ_EXPR, type, tem,
+ build_int_cst (TREE_TYPE (tem), 0));
+ }
/* Fold (X ^ Y) & Y as ~X & Y. */
if (TREE_CODE (arg0) == BIT_XOR_EXPR
return fold_build2_loc (loc, RSHIFT_EXPR, type,
TREE_OPERAND (arg0, 0),
- build_int_cst (NULL_TREE, pow2));
+ build_int_cst (integer_type_node, pow2));
}
}
WARN_STRICT_OVERFLOW_MISC);
sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
- sh_cnt, build_int_cst (NULL_TREE, pow2));
+ sh_cnt,
+ build_int_cst (TREE_TYPE (sh_cnt),
+ pow2));
return fold_build2_loc (loc, RSHIFT_EXPR, type,
fold_convert_loc (loc, type, arg0), sh_cnt);
}
arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
lshift = build_int_cst (type, -1);
- lshift = int_const_binop (code, lshift, arg1, 0);
+ lshift = int_const_binop (code, lshift, arg1);
return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
}
return fold_build2_loc (loc, code, type, arg0, tem);
}
- truth_andor:
- /* We only do these simplifications if we are optimizing. */
- if (!optimize)
- return NULL_TREE;
-
- /* Check for things like (A || B) && (A || C). We can convert this
- to A || (B && C). Note that either operator can be any of the four
- truth and/or operations and the transformation will still be
- valid. Also note that we only care about order for the
- ANDIF and ORIF operators. If B contains side effects, this
- might change the truth-value of A. */
- if (TREE_CODE (arg0) == TREE_CODE (arg1)
- && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
- || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
- || TREE_CODE (arg0) == TRUTH_AND_EXPR
- || TREE_CODE (arg0) == TRUTH_OR_EXPR)
- && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
- {
- tree a00 = TREE_OPERAND (arg0, 0);
- tree a01 = TREE_OPERAND (arg0, 1);
- tree a10 = TREE_OPERAND (arg1, 0);
- tree a11 = TREE_OPERAND (arg1, 1);
- int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
- || TREE_CODE (arg0) == TRUTH_AND_EXPR)
- && (code == TRUTH_AND_EXPR
- || code == TRUTH_OR_EXPR));
-
- if (operand_equal_p (a00, a10, 0))
- return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
- fold_build2_loc (loc, code, type, a01, a11));
- else if (commutative && operand_equal_p (a00, a11, 0))
- return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
- fold_build2_loc (loc, code, type, a01, a10));
- else if (commutative && operand_equal_p (a01, a10, 0))
- return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
- fold_build2_loc (loc, code, type, a00, a11));
-
- /* This case if tricky because we must either have commutative
- operators or else A10 must not have side-effects. */
-
- else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
- && operand_equal_p (a01, a11, 0))
- return fold_build2_loc (loc, TREE_CODE (arg0), type,
- fold_build2_loc (loc, code, type, a00, a10),
- a01);
- }
-
- /* See if we can build a range comparison. */
- if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
- return tem;
-
- if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
- || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
- {
- tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
- if (tem)
- return fold_build2_loc (loc, code, type, tem, arg1);
- }
-
- if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
- || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
- {
- tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
- if (tem)
- return fold_build2_loc (loc, code, type, arg0, tem);
- }
-
- /* Check for the possibility of merging component references. If our
- lhs is another similar operation, try to merge its rhs with our
- rhs. Then try to merge our lhs and rhs. */
- if (TREE_CODE (arg0) == code
- && 0 != (tem = fold_truthop (loc, code, type,
- TREE_OPERAND (arg0, 1), arg1)))
- return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
-
- if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
- return tem;
+ if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
+ != NULL_TREE)
+ return tem;
return NULL_TREE;
&& operand_equal_p (n1, a0, 0)))
return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
}
- goto truth_andor;
+
+ if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
+ != NULL_TREE)
+ return tem;
+
+ return NULL_TREE;
case TRUTH_XOR_EXPR:
/* If the second arg is constant zero, drop it. */
/* bool_var != 1 becomes !bool_var. */
if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
&& code == NE_EXPR)
- return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
- fold_convert_loc (loc, type, arg0));
+ return fold_convert_loc (loc, type,
+ fold_build1_loc (loc, TRUTH_NOT_EXPR,
+ TREE_TYPE (arg0), arg0));
/* bool_var == 0 becomes !bool_var. */
if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
&& code == EQ_EXPR)
- return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
- fold_convert_loc (loc, type, arg0));
+ return fold_convert_loc (loc, type,
+ fold_build1_loc (loc, TRUTH_NOT_EXPR,
+ TREE_TYPE (arg0), arg0));
/* !exp != 0 becomes !exp */
if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
}
}
- /* If this is an NE comparison of zero with an AND of one, remove the
- comparison since the AND will give the correct value. */
- if (code == NE_EXPR
- && integer_zerop (arg1)
- && TREE_CODE (arg0) == BIT_AND_EXPR
- && integer_onep (TREE_OPERAND (arg0, 1)))
- return fold_convert_loc (loc, type, arg0);
-
/* If we have (A & C) == C where C is a power of 2, convert this into
(A & C) != 0. Similarly for NE_EXPR. */
if (TREE_CODE (arg0) == BIT_AND_EXPR
TREE_OPERAND (arg0, 1))
&& !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
{
- location_t loc0 = EXPR_LOCATION (arg0);
- if (loc0 == UNKNOWN_LOCATION)
- loc0 = loc;
+ location_t loc0 = expr_location_or (arg0, loc);
tem = fold_truth_not_expr (loc0, arg0);
if (tem && COMPARISON_CLASS_P (tem))
{
if (truth_value_p (TREE_CODE (arg0))
&& tree_swap_operands_p (op1, op2, false))
{
- location_t loc0 = EXPR_LOCATION (arg0);
- if (loc0 == UNKNOWN_LOCATION)
- loc0 = loc;
+ location_t loc0 = expr_location_or (arg0, loc);
/* See if this can be inverted. If it can't, possibly because
it was a floating-point inequality comparison, don't do
anything. */
&& truth_value_p (TREE_CODE (arg0))
&& truth_value_p (TREE_CODE (arg1)))
{
- location_t loc0 = EXPR_LOCATION (arg0);
- if (loc0 == UNKNOWN_LOCATION)
- loc0 = loc;
+ location_t loc0 = expr_location_or (arg0, loc);
/* Only perform transformation if ARG0 is easily inverted. */
tem = fold_truth_not_expr (loc0, arg0);
if (tem)
&& truth_value_p (TREE_CODE (arg0))
&& truth_value_p (TREE_CODE (op2)))
{
- location_t loc0 = EXPR_LOCATION (arg0);
- if (loc0 == UNKNOWN_LOCATION)
- loc0 = loc;
+ location_t loc0 = expr_location_or (arg0, loc);
/* Only perform transformation if ARG0 is easily inverted. */
tem = fold_truth_not_expr (loc0, arg0);
if (tem)
if (TREE_CODE_CLASS (code) != tcc_type
&& TREE_CODE_CLASS (code) != tcc_declaration
&& code != TREE_LIST
- && code != SSA_NAME)
+ && code != SSA_NAME
+ && CODE_CONTAINS_STRUCT (code, TS_COMMON))
fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
switch (TREE_CODE_CLASS (code))
{
|| tree_int_cst_sgn (bottom) < 0)))
return 0;
return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
- top, bottom, 0));
+ top, bottom));
default:
return 0;