else if (TREE_CODE (t2) == NEGATE_EXPR)
return build2 (MINUS_EXPR, type, fold_convert (type, t1),
fold_convert (type, TREE_OPERAND (t2, 0)));
+ else if (integer_zerop (t2))
+ return fold_convert (type, t1);
}
+ else if (code == MINUS_EXPR)
+ {
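+ /* As in the PLUS_EXPR case above, t1 - 0 folds to just t1. */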
+ if (integer_zerop (t2))
+ return fold_convert (type, t1);
+ }
+
return build2 (code, type, fold_convert (type, t1),
fold_convert (type, t2));
}
int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
- /* If one is specified and the other isn't, they aren't equal and if
- neither is specified, they are.
-
- ??? This is temporary and is meant only to handle the cases of the
- optional operands for COMPONENT_REF and ARRAY_REF. */
- if ((arg0 && !arg1) || (!arg0 && arg1))
- return 0;
- else if (!arg0 && !arg1)
- return 1;
/* If either is ERROR_MARK, they aren't equal. */
- else if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
+ if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
return 0;
/* If both types don't have the same signedness, then we can't consider
if (flags & OEP_ONLY_CONST)
return 0;
+/* Define macros to test an operand from arg0 and arg1 for equality and a
+ variant that allows null and views null as being different from any
+ non-null value. In the latter case, if either operand is null, they
+ both must be; otherwise, do the normal comparison. */
+#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
+ TREE_OPERAND (arg1, N), flags)
+
+#define OP_SAME_WITH_NULL(N) \
+ ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
+ ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
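+
+/* For example, OP_SAME_WITH_NULL (2) is used below for COMPONENT_REF,
+ whose operand 2 is usually absent; two absent operands compare equal,
+ and an absent operand never equals a present one. */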
+
switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
{
case tcc_unary:
break;
}
- return operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags);
+ return OP_SAME (0);
+
case tcc_comparison:
case tcc_binary:
- if (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags))
+ if (OP_SAME (0) && OP_SAME (1))
return 1;
/* For commutative ops, allow the other order. */
case MISALIGNED_INDIRECT_REF:
case REALPART_EXPR:
case IMAGPART_EXPR:
- return operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags);
+ return OP_SAME (0);
case ARRAY_REF:
case ARRAY_RANGE_REF:
- return (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 2),
- TREE_OPERAND (arg1, 2), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 3),
- TREE_OPERAND (arg1, 3), flags));
-
+ /* Operands 2 and 3 may be null. */
+ return (OP_SAME (0)
+ && OP_SAME (1)
+ && OP_SAME_WITH_NULL (2)
+ && OP_SAME_WITH_NULL (3));
case COMPONENT_REF:
- return (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 2),
- TREE_OPERAND (arg1, 2), flags));
-
+ /* Handle operand 2 the same as for ARRAY_REF. */
+ return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
case BIT_FIELD_REF:
- return (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 2),
- TREE_OPERAND (arg1, 2), flags));
+ return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
+
default:
return 0;
}
{
case ADDR_EXPR:
case TRUTH_NOT_EXPR:
- return operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags);
+ return OP_SAME (0);
case TRUTH_ANDIF_EXPR:
case TRUTH_ORIF_EXPR:
- return operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags);
+ return OP_SAME (0) && OP_SAME (1);
case TRUTH_AND_EXPR:
case TRUTH_OR_EXPR:
case TRUTH_XOR_EXPR:
+ if (OP_SAME (0) && OP_SAME (1))
+ return 1;
+
+ /* Otherwise, take into account that this is a commutative operation. */
return (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
+ TREE_OPERAND (arg1, 1), flags)
&& operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags))
- || (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 1), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 0), flags));
+ TREE_OPERAND (arg1, 0), flags));
case CALL_EXPR:
/* If the CALL_EXPRs call different functions, then they
clearly can not be equal. */
- if (! operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags))
+ if (!OP_SAME (0))
return 0;
{
default:
return 0;
}
+
+#undef OP_SAME
+#undef OP_SAME_WITH_NULL
}
\f
/* Similar to operand_equal_p, but see if ARG0 might have been made by
simple_operand_p (tree exp)
{
/* Strip any conversions that don't change the machine mode. */
- while ((TREE_CODE (exp) == NOP_EXPR
- || TREE_CODE (exp) == CONVERT_EXPR)
- && (TYPE_MODE (TREE_TYPE (exp))
- == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
- exp = TREE_OPERAND (exp, 0);
+ STRIP_NOPS (exp);
return (CONSTANT_CLASS_P (exp)
+ || TREE_CODE (exp) == SSA_NAME
|| (DECL_P (exp)
&& ! TREE_ADDRESSABLE (exp)
&& ! TREE_THIS_VOLATILE (exp)
if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
{
- if (first_rtl_op (code) > 0)
+ if (TREE_CODE_LENGTH (code) > 0)
arg0 = TREE_OPERAND (exp, 0);
if (TREE_CODE_CLASS (code) == tcc_comparison
|| TREE_CODE_CLASS (code) == tcc_unary
\f
-#ifndef RANGE_TEST_NON_SHORT_CIRCUIT
-#define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
+#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
+#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif
/* EXP is some logical combination of boolean tests. See if we can
/* On machines where the branch cost is expensive, if this is a
short-circuited branch and the underlying object on both sides
is the same, make a non-short-circuit operation. */
- else if (RANGE_TEST_NON_SHORT_CIRCUIT
+ else if (LOGICAL_OP_NON_SHORT_CIRCUIT
&& lhs != 0 && rhs != 0
&& (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
|| TREE_CODE (exp) == TRUTH_ORIF_EXPR)
ll_arg, rl_arg),
fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
- return build2 (code, truth_type, lhs, rhs);
+ if (LOGICAL_OP_NON_SHORT_CIRCUIT)
+ return build2 (code, truth_type, lhs, rhs);
}
/* See if the comparisons can be merged. Then get all the parameters for
return t1;
break;
- case NEGATE_EXPR: case ABS_EXPR:
+ case ABS_EXPR:
+ /* If widening the type changes it from signed to unsigned, then we
+ must avoid building ABS_EXPR itself as unsigned. */
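+ /* E.g. an ABS_EXPR built in an unsigned type would be a no-op and
+ would lose the negation of negative values, so the operation is
+ extracted in the signed variant of ctype and the result converted
+ back. */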
+ if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
+ {
+ tree cstype = (*lang_hooks.types.signed_type) (ctype);
+ if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
+ {
+ t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
+ return fold_convert (ctype, t1);
+ }
+ break;
+ }
+ /* FALLTHROUGH */
+ case NEGATE_EXPR:
if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
break;
operations as unsigned. If we must use the AND, we have a choice.
Normally unsigned is faster, but for some machines signed is. */
#ifdef LOAD_EXTEND_OP
- ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
+ ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
+ && !flag_syntax_only) ? 0 : 1;
#else
ops_unsigned = 1;
#endif
return 0;
}
+/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
+ ARG0 is extended to a wider type. */
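+/* For instance (an illustrative case, assuming 8-bit unsigned char):
+ with "unsigned char c", the comparison "(int) c == 300" is against a
+ constant outside the range [0, 255] of the narrower type, so the
+ result is known to be false. */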
+
+static tree
+fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
+{
+ tree arg0_unw = get_unwidened (arg0, NULL_TREE);
+ tree arg1_unw;
+ tree shorter_type, outer_type;
+ tree min, max;
+ bool above, below;
+
+ if (arg0_unw == arg0)
+ return NULL_TREE;
+ shorter_type = TREE_TYPE (arg0_unw);
+
+ arg1_unw = get_unwidened (arg1, shorter_type);
+ if (!arg1_unw)
+ return NULL_TREE;
+
+ /* If possible, express the comparison in the shorter mode. */
+ if ((code == EQ_EXPR || code == NE_EXPR
+ || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
+ && (TREE_TYPE (arg1_unw) == shorter_type
+ || (TREE_CODE (arg1_unw) == INTEGER_CST
+ && TREE_CODE (shorter_type) == INTEGER_TYPE
+ && int_fits_type_p (arg1_unw, shorter_type))))
+ return fold (build2 (code, type, arg0_unw,
+ fold_convert (shorter_type, arg1_unw)));
+
+ if (TREE_CODE (arg1_unw) != INTEGER_CST)
+ return NULL_TREE;
+
+ /* If we are comparing with an integer that does not fit into the range
+ of the shorter type, the result is known. */
+ outer_type = TREE_TYPE (arg1_unw);
+ min = lower_bound_in_type (outer_type, shorter_type);
+ max = upper_bound_in_type (outer_type, shorter_type);
+
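+ /* ABOVE is true if ARG1 is greater than every value representable in
+ the shorter type; BELOW, if it is smaller than every such value. */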
+ above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
+ max, arg1_unw));
+ below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
+ arg1_unw, min));
+
+ switch (code)
+ {
+ case EQ_EXPR:
+ if (above || below)
+ return omit_one_operand (type, integer_zero_node, arg0);
+ break;
+
+ case NE_EXPR:
+ if (above || below)
+ return omit_one_operand (type, integer_one_node, arg0);
+ break;
+
+ case LT_EXPR:
+ case LE_EXPR:
+ if (above)
+ return omit_one_operand (type, integer_one_node, arg0);
+ else if (below)
+ return omit_one_operand (type, integer_zero_node, arg0);
+ break;
+
+ case GT_EXPR:
+ case GE_EXPR:
+ if (above)
+ return omit_one_operand (type, integer_zero_node, arg0);
+ else if (below)
+ return omit_one_operand (type, integer_one_node, arg0);
+ break;
+
+ default:
+ break;
+ }
+
+ return NULL_TREE;
+}
+
+/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where the
+ conversion wrapped around ARG0 changes only the signedness. */
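+/* For instance (an illustrative case): with "unsigned int u", the test
+ "(int) u == -1" can be rewritten as "u == (unsigned int) -1", doing
+ the comparison directly in the inner type. */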
+
+static tree
+fold_sign_changed_comparison (enum tree_code code, tree type,
+ tree arg0, tree arg1)
+{
+ tree arg0_inner, tmp;
+ tree inner_type, outer_type;
+
+ if (TREE_CODE (arg0) != NOP_EXPR)
+ return NULL_TREE;
+
+ outer_type = TREE_TYPE (arg0);
+ arg0_inner = TREE_OPERAND (arg0, 0);
+ inner_type = TREE_TYPE (arg0_inner);
+
+ if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
+ return NULL_TREE;
+
+ if (TREE_CODE (arg1) != INTEGER_CST
+ && !(TREE_CODE (arg1) == NOP_EXPR
+ && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
+ return NULL_TREE;
+
+ if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
+ && code != NE_EXPR
+ && code != EQ_EXPR)
+ return NULL_TREE;
+
+ if (TREE_CODE (arg1) == INTEGER_CST)
+ {
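+ /* Re-express the constant in the inner type; force_fit_type makes
+ it fit the inner type's precision while propagating any overflow
+ flags from the original constant. */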
+ tmp = build_int_cst_wide (inner_type,
+ TREE_INT_CST_LOW (arg1),
+ TREE_INT_CST_HIGH (arg1));
+ arg1 = force_fit_type (tmp, 0,
+ TREE_OVERFLOW (arg1),
+ TREE_CONSTANT_OVERFLOW (arg1));
+ }
+ else
+ arg1 = fold_convert (inner_type, arg1);
+
+ return fold (build2 (code, type, arg0_inner, arg1));
+}
+
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
step of the array. TYPE is the type of the expression. ADDR is the address.
MULT is the multiplicative expression. If the function succeeds, the new
return build1 (ADDR_EXPR, type, ret);
}
+
+/* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
+ means A >= Y && A != MAX, but in this case we know that
+ A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
+
+static tree
+fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
+{
+ tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
+
+ if (TREE_CODE (bound) == LT_EXPR)
+ a = TREE_OPERAND (bound, 0);
+ else if (TREE_CODE (bound) == GT_EXPR)
+ a = TREE_OPERAND (bound, 1);
+ else
+ return NULL_TREE;
+
+ typea = TREE_TYPE (a);
+ if (!INTEGRAL_TYPE_P (typea)
+ && !POINTER_TYPE_P (typea))
+ return NULL_TREE;
+
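+ /* Decompose INEQ as Y < A1 (equivalently, A1 > Y). */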
+ if (TREE_CODE (ineq) == LT_EXPR)
+ {
+ a1 = TREE_OPERAND (ineq, 1);
+ y = TREE_OPERAND (ineq, 0);
+ }
+ else if (TREE_CODE (ineq) == GT_EXPR)
+ {
+ a1 = TREE_OPERAND (ineq, 0);
+ y = TREE_OPERAND (ineq, 1);
+ }
+ else
+ return NULL_TREE;
+
+ if (TREE_TYPE (a1) != typea)
+ return NULL_TREE;
+
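+ /* Verify that A1 is in fact A + 1: their difference must fold to 1. */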
+ diff = fold (build2 (MINUS_EXPR, typea, a1, a));
+ if (!integer_onep (diff))
+ return NULL_TREE;
+
+ return fold (build2 (GE_EXPR, type, a, y));
+}
+
/* Perform constant folding and related simplification of EXPR.
The related simplifications include x*1 => x, x*0 => 0, etc.,
and application of the associative law.
}
else if (IS_EXPR_CODE_CLASS (kind))
{
- int len = first_rtl_op (code);
+ int len = TREE_CODE_LENGTH (code);
int i;
for (i = 0; i < len; i++)
{
&& ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
&& (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
== TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
- && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
- && (INTEGRAL_TYPE_P
- (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
- && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
+ && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
+ && (INTEGRAL_TYPE_P
+ (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
+ && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
+ || flag_syntax_only))
tem = build1 (code, type,
build3 (COND_EXPR,
TREE_TYPE (TREE_OPERAND
change = (cst == 0);
#ifdef LOAD_EXTEND_OP
if (change
+ && !flag_syntax_only
&& (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
== ZERO_EXTEND))
{
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
return omit_one_operand (type, integer_zero_node, arg0);
+ /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
+ means A >= Y && A != MAX, but in this case we know that
+ A < X <= MAX. */
+
+ if (!TREE_SIDE_EFFECTS (arg0)
+ && !TREE_SIDE_EFFECTS (arg1))
+ {
+ tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
+ if (tem)
+ return fold (build2 (code, type, tem, arg1));
+
+ tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
+ if (tem)
+ return fold (build2 (code, type, arg0, tem));
+ }
+
truth_andor:
/* We only do these simplifications if we are optimizing. */
if (!optimize)
/* Comparisons with the highest or lowest possible integer of
the specified size will have known values.
- This is quite similar to fold_relational_hi_lo; however, my
- attempts to share the code have been nothing but trouble.
- I give up for now. */
+ This is quite similar to fold_relational_hi_lo; however,
+ attempts to share the code have been nothing but trouble. */
{
int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
return fold (build2 (code, type,
TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
- /* If we are widening one operand of an integer comparison,
- see if the other operand is similarly being widened. Perhaps we
- can do the comparison in the narrower type. */
else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
- && TREE_CODE (arg0) == NOP_EXPR
- && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
- && (code == EQ_EXPR || code == NE_EXPR
- || TYPE_UNSIGNED (TREE_TYPE (arg0))
- == TYPE_UNSIGNED (TREE_TYPE (tem)))
- && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
- && (TREE_TYPE (t1) == TREE_TYPE (tem)
- || (TREE_CODE (t1) == INTEGER_CST
- && TREE_CODE (TREE_TYPE (tem)) == INTEGER_TYPE
- && int_fits_type_p (t1, TREE_TYPE (tem)))))
- return fold (build2 (code, type, tem,
- fold_convert (TREE_TYPE (tem), t1)));
+ && TREE_CODE (arg0) == NOP_EXPR)
+ {
+ /* If we are widening one operand of an integer comparison,
+ see if the other operand is similarly being widened. Perhaps we
+ can do the comparison in the narrower type. */
+ tem = fold_widened_comparison (code, type, arg0, arg1);
+ if (tem)
+ return tem;
+
+ /* Or if we are changing signedness. */
+ tem = fold_sign_changed_comparison (code, type, arg0, arg1);
+ if (tem)
+ return tem;
+ }
/* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
constant, we can simplify it. */
case tcc_unary:
case tcc_binary:
case tcc_statement:
- len = first_rtl_op (code);
+ len = TREE_CODE_LENGTH (code);
for (i = 0; i < len; ++i)
fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
break;
fold_convert (st0, op0),
fold_convert (st1, integer_zero_node));
- retval
- = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
- TREE_TYPE (exp),
- TREE_OPERAND (exp, 0),
- TREE_OPERAND (exp, 1));
+ retval = fold_binary_to_constant (TREE_CODE (exp),
+ TREE_TYPE (exp),
+ TREE_OPERAND (exp, 0),
+ TREE_OPERAND (exp, 1));
/* If we are in gimple form, then returning EXP would create
non-gimple expressions. Clearing it is safe and insures
simpler than the generic fold routine. */
tree
-nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
- tree op0, tree op1)
+fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
int wins = 1;
tree subop0;
the generic fold routine. */
tree
-nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
- tree op0)
+fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
/* Make sure we have a suitable constant argument. */
if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
{
tree base = t;
- while (handled_component_p (base)
- || TREE_CODE (base) == REALPART_EXPR
- || TREE_CODE (base) == IMAGPART_EXPR)
+ while (handled_component_p (base))
base = TREE_OPERAND (base, 0);
if (DECL_P (base))
TREE_ADDRESSABLE (base) = 1;