static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
tree);
static tree fold_range_test (tree);
-static tree fold_cond_expr_with_comparison (tree, tree, tree);
+static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
return fold (build1 (NOP_EXPR, type, arg));
}
else if (VOID_TYPE_P (type))
- return fold (build1 (CONVERT_EXPR, type, arg));
+ return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
abort ();
}
\f
case ARRAY_REF:
case ARRAY_RANGE_REF:
case BIT_FIELD_REF:
- case BUFFER_REF:
case OBJ_TYPE_REF:
case REALPART_EXPR:
case BIND_EXPR:
case MIN_EXPR:
case MAX_EXPR:
- case RTL_EXPR:
break;
default:
switch (TREE_CODE (arg0))
{
case INDIRECT_REF:
+ case REALPART_EXPR:
+ case IMAGPART_EXPR:
return operand_equal_p (TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg1, 0), flags);
&& operand_equal_p (TREE_OPERAND (arg0, 1),
TREE_OPERAND (arg1, 0), flags));
- case RTL_EXPR:
- return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
-
case CALL_EXPR:
/* If the CALL_EXPRs call different functions, then they
clearly can not be equal. */
|| code == COMPOUND_EXPR))
class = '2';
- else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
+ else if (class == 'e' && code == SAVE_EXPR
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
{
/* If we've already found a CVAL1 or CVAL2, this expression is
tree t = fold_convert (type, result);
if (TREE_SIDE_EFFECTS (omitted))
- return build2 (COMPOUND_EXPR, type, omitted, t);
+ return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
return non_lvalue (t);
}
tree t = fold_convert (type, result);
if (TREE_SIDE_EFFECTS (omitted))
- return build2 (COMPOUND_EXPR, type, omitted, t);
+ return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
return pedantic_non_lvalue (t);
}
/* If the number of bits in the reference is the same as the bitsize of
the outer type, then the outer type gives the signedness. Otherwise
(in case of a small bitfield) the signedness is unchanged. */
- if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
+ if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
*punsignedp = TYPE_UNSIGNED (outer_type);
/* Compute the mask to access the bitfield. */
/* If we're converting arg0 from an unsigned type, to exp,
- a signed type, we will be doing the compairson as unsigned.
+ a signed type, we will be doing the comparison as unsigned.
The tests above have already verified that LOW and HIGH
are both positive.
\f
/* Subroutine of fold, looking inside expressions of the form
- A op B ? A : C, where ARG0 is A op B and ARG2 is C. This
- function is being used also to optimize A op B ? C : A, by
- reversing the comparison first.
+ A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
+ of the COND_EXPR. This function is being used also to optimize
+ A op B ? C : A, by reversing the comparison first.
Return a folded expression whose code is not a COND_EXPR
anymore, or NULL_TREE if no folding opportunity is found. */
static tree
-fold_cond_expr_with_comparison (tree type, tree arg0, tree arg2)
+fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
{
enum tree_code comp_code = TREE_CODE (arg0);
tree arg00 = TREE_OPERAND (arg0, 0);
tree arg01 = TREE_OPERAND (arg0, 1);
+ tree arg1_type = TREE_TYPE (arg1);
tree tem;
+
+ STRIP_NOPS (arg1);
STRIP_NOPS (arg2);
/* If we have A op 0 ? A : -A, consider applying the following
? real_zerop (arg01)
: integer_zerop (arg01))
&& TREE_CODE (arg2) == NEGATE_EXPR
- && operand_equal_p (TREE_OPERAND (arg2, 0), arg00, 0))
+ && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
switch (comp_code)
{
case EQ_EXPR:
- return fold_convert (type, negate_expr (arg00));
+ tem = fold_convert (arg1_type, arg1);
+ return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
case NE_EXPR:
- return pedantic_non_lvalue (fold_convert (type, arg00));
+ return pedantic_non_lvalue (fold_convert (type, arg1));
case GE_EXPR:
case GT_EXPR:
- if (TYPE_UNSIGNED (TREE_TYPE (arg00)))
- arg00 = fold_convert (lang_hooks.types.signed_type
- (TREE_TYPE (arg00)), arg00);
- tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg00), arg00));
+ if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
+ arg1 = fold_convert (lang_hooks.types.signed_type
+ (TREE_TYPE (arg1)), arg1);
+ tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
return pedantic_non_lvalue (fold_convert (type, tem));
case LE_EXPR:
case LT_EXPR:
- if (TYPE_UNSIGNED (TREE_TYPE (arg00)))
- arg00 = fold_convert (lang_hooks.types.signed_type
- (TREE_TYPE (arg00)), arg00);
- tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg00), arg00));
+ if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
+ arg1 = fold_convert (lang_hooks.types.signed_type
+ (TREE_TYPE (arg1)), arg1);
+ tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
return negate_expr (fold_convert (type, tem));
default:
abort ();
if (integer_zerop (arg01) && integer_zerop (arg2))
{
if (comp_code == NE_EXPR)
- return pedantic_non_lvalue (fold_convert (type, arg00));
+ return pedantic_non_lvalue (fold_convert (type, arg1));
else if (comp_code == EQ_EXPR)
return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
}
if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
{
comp_type = type;
- comp_op0 = arg00;
+ comp_op0 = arg1;
comp_op1 = arg2;
}
case EQ_EXPR:
return pedantic_non_lvalue (fold_convert (type, arg2));
case NE_EXPR:
- return pedantic_non_lvalue (fold_convert (type, arg00));
+ return pedantic_non_lvalue (fold_convert (type, arg1));
case LE_EXPR:
case LT_EXPR:
/* In C++ a ?: expression can be an lvalue, so put the
operand which will be used if they are equal first
so that we can convert this back to the
corresponding COND_EXPR. */
- if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00))))
+ if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
return pedantic_non_lvalue (
fold_convert (type, fold (build2 (MIN_EXPR, comp_type,
(comp_code == LE_EXPR
break;
case GE_EXPR:
case GT_EXPR:
- if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00))))
+ if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
return pedantic_non_lvalue (
fold_convert (type, fold (build2 (MAX_EXPR, comp_type,
(comp_code == GE_EXPR
{
case EQ_EXPR:
/* We can replace A with C1 in this case. */
- arg00 = fold_convert (type, arg01);
- return fold (build3 (COND_EXPR, type, arg0, arg00, arg2));
+ arg1 = fold_convert (type, arg01);
+ return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
case LT_EXPR:
/* If C1 is C2 + 1, this is min(A, C2). */
integer_one_node, 0),
OEP_ONLY_CONST))
return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
- type, arg00, arg2)));
+ type, arg1, arg2)));
break;
case LE_EXPR:
integer_one_node, 0),
OEP_ONLY_CONST))
return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
- type, arg00, arg2)));
+ type, arg1, arg2)));
break;
case GT_EXPR:
integer_one_node, 0),
OEP_ONLY_CONST))
return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
- type, arg00, arg2)));
+ type, arg1, arg2)));
break;
case GE_EXPR:
integer_one_node, 0),
OEP_ONLY_CONST))
return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
- type, arg00, arg2)));
+ type, arg1, arg2)));
break;
case NE_EXPR:
break;
inner, size_int (bitnum));
if (code == EQ_EXPR)
- inner = build2 (BIT_XOR_EXPR, intermediate_type,
- inner, integer_one_node);
+ inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
+ inner, integer_one_node));
/* Put the AND last so it can combine with more things. */
inner = build2 (BIT_AND_EXPR, intermediate_type,
if all operands are constant. */
int wins = 1;
- /* Don't try to process an RTL_EXPR since its operands aren't trees.
- Likewise for a SAVE_EXPR that's already been evaluated. */
- if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
- return t;
-
/* Return right away if a constant. */
if (kind == 'c')
return t;
if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
return non_lvalue (fold_convert (type, arg1));
+ /* Convert X + -C into X - C. */
+ if (TREE_CODE (arg1) == REAL_CST
+ && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
+ {
+ tem = fold_negate_const (arg1, type);
+ if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
+ return fold (build2 (MINUS_EXPR, type,
+ fold_convert (type, arg0),
+ fold_convert (type, tem)));
+ }
+
/* Convert x+x into x*2.0. */
if (operand_equal_p (arg0, arg1, 0)
&& SCALAR_FLOAT_TYPE_P (type))
/* A - B -> A + (-B) if B is easily negatable. */
if (!wins && negate_expr_p (arg1)
- && (FLOAT_TYPE_P (type)
+ && ((FLOAT_TYPE_P (type)
+ /* Avoid this transformation if B is a positive REAL_CST. */
+ && (TREE_CODE (arg1) != REAL_CST
+ || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
|| (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
return non_lvalue (fold_convert (type, arg0));
if (operand_equal_p (arg0, arg1, 0))
return non_lvalue (fold_convert (type, arg0));
+
+ /* ~X | X is -1. */
+ if (TREE_CODE (arg0) == BIT_NOT_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+ {
+ t1 = build_int_2 (-1, -1);
+ TREE_TYPE (t1) = type;
+ force_fit_type (t1, 0);
+ return omit_one_operand (type, t1, arg1);
+ }
+
+ /* X | ~X is -1. */
+ if (TREE_CODE (arg1) == BIT_NOT_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+ {
+ t1 = build_int_2 (-1, -1);
+ TREE_TYPE (t1) = type;
+ force_fit_type (t1, 0);
+ return omit_one_operand (type, t1, arg0);
+ }
+
t1 = distribute_bit_expr (code, type, arg0, arg1);
if (t1 != NULL_TREE)
return t1;
if (operand_equal_p (arg0, arg1, 0))
return omit_one_operand (type, integer_zero_node, arg0);
+ /* ~X ^ X is -1. */
+ if (TREE_CODE (arg0) == BIT_NOT_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+ {
+ t1 = build_int_2 (-1, -1);
+ TREE_TYPE (t1) = type;
+ force_fit_type (t1, 0);
+ return omit_one_operand (type, t1, arg1);
+ }
+
+ /* X ^ ~X is -1. */
+ if (TREE_CODE (arg1) == BIT_NOT_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+ {
+ t1 = build_int_2 (-1, -1);
+ TREE_TYPE (t1) = type;
+ force_fit_type (t1, 0);
+ return omit_one_operand (type, t1, arg0);
+ }
+
/* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
with a constant, and the two constants have no bits in common,
we should treat this as a BIT_IOR_EXPR since this may produce more
return omit_one_operand (type, arg1, arg0);
if (operand_equal_p (arg0, arg1, 0))
return non_lvalue (fold_convert (type, arg0));
+
+ /* ~X & X is always zero. */
+ if (TREE_CODE (arg0) == BIT_NOT_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+ return omit_one_operand (type, integer_zero_node, arg1);
+
+ /* X & ~X is always zero. */
+ if (TREE_CODE (arg1) == BIT_NOT_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+ return omit_one_operand (type, integer_zero_node, arg0);
+
t1 = distribute_bit_expr (code, type, arg0, arg1);
if (t1 != NULL_TREE)
return t1;
return omit_one_operand (type, integer_zero_node, arg0);
if (integer_zerop (arg1))
return t;
+
/* X % -1 is zero. */
if (!TYPE_UNSIGNED (type)
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_INT_CST_HIGH (arg1) == -1)
return omit_one_operand (type, integer_zero_node, arg0);
+ /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
+ BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
+ if (code == TRUNC_MOD_EXPR
+ && TYPE_UNSIGNED (type)
+ && integer_pow2p (arg1))
+ {
+ unsigned HOST_WIDE_INT high, low;
+ tree mask;
+ int l;
+
+ l = tree_log2 (arg1);
+ if (l >= HOST_BITS_PER_WIDE_INT)
+ {
+ high = ((unsigned HOST_WIDE_INT) 1
+ << (l - HOST_BITS_PER_WIDE_INT)) - 1;
+ low = -1;
+ }
+ else
+ {
+ high = 0;
+ low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
+ }
+
+ mask = build_int_2 (low, high);
+ TREE_TYPE (mask) = type;
+ return fold (build2 (BIT_AND_EXPR, type,
+ fold_convert (type, arg0), mask));
+ }
+
+ /* X % -C is the same as X % C (for all rounding moduli). */
+ if (!TYPE_UNSIGNED (type)
+ && TREE_CODE (arg1) == INTEGER_CST
+ && TREE_INT_CST_HIGH (arg1) < 0
+ && !flag_trapv
+ /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
+ && !sign_bit_p (arg1, arg1))
+ return fold (build2 (code, type, fold_convert (type, arg0),
+ fold_convert (type, negate_expr (arg1))));
+
+ /* X % -Y is the same as X % Y (for all rounding moduli). */
+ if (!TYPE_UNSIGNED (type)
+ && TREE_CODE (arg1) == NEGATE_EXPR
+ && !flag_trapv)
+ return fold (build2 (code, type, fold_convert (type, arg0),
+ fold_convert (type, TREE_OPERAND (arg1, 0))));
+
if (TREE_CODE (arg1) == INTEGER_CST
&& 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
code, NULL_TREE)))
if (integer_zerop (arg0))
return omit_one_operand (type, arg0, arg1);
+ /* !X && X is always false. */
+ if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+ return omit_one_operand (type, integer_zero_node, arg1);
+ /* X && !X is always false. */
+ if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+ return omit_one_operand (type, integer_zero_node, arg0);
+
truth_andor:
/* We only do these simplifications if we are optimizing. */
if (!optimize)
TRUTH_OR_EXPR. */
if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
return omit_one_operand (type, arg0, arg1);
+
+ /* !X || X is always true. */
+ if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+ return omit_one_operand (type, integer_one_node, arg1);
+ /* X || !X is always true. */
+ if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+ return omit_one_operand (type, integer_one_node, arg0);
+
goto truth_andor;
case TRUTH_XOR_EXPR:
- /* If either arg is constant zero, drop it. */
- if (integer_zerop (arg0))
- return non_lvalue (fold_convert (type, arg1));
+ /* If the second arg is constant zero, drop it. */
if (integer_zerop (arg1))
return non_lvalue (fold_convert (type, arg0));
- /* If either arg is constant true, this is a logical inversion. */
- if (integer_onep (arg0))
- return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
+ /* If the second arg is constant true, this is a logical inversion. */
if (integer_onep (arg1))
return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
/* Identical arguments cancel to zero. */
if (operand_equal_p (arg0, arg1, 0))
return omit_one_operand (type, integer_zero_node, arg0);
+
+ /* !X ^ X is always true. */
+ if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+ return omit_one_operand (type, integer_one_node, arg1);
+
+ /* X ^ !X is always true. */
+ if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+ return omit_one_operand (type, integer_one_node, arg0);
+
return t;
case EQ_EXPR:
&& integer_pow2p (TREE_OPERAND (arg0, 1)))
{
tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
- tree newmod = build2 (TREE_CODE (arg0), newtype,
- fold_convert (newtype,
- TREE_OPERAND (arg0, 0)),
- fold_convert (newtype,
- TREE_OPERAND (arg0, 1)));
+ tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
+ fold_convert (newtype,
+ TREE_OPERAND (arg0, 0)),
+ fold_convert (newtype,
+ TREE_OPERAND (arg0, 1))));
- return build2 (code, type, newmod, fold_convert (newtype, arg1));
+ return fold (build2 (code, type, newmod,
+ fold_convert (newtype, arg1)));
}
/* If this is an NE comparison of zero with an AND of one, remove the
&& !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
{
tem = fold_cond_expr_with_comparison (type, arg0,
+ TREE_OPERAND (t, 1),
TREE_OPERAND (t, 2));
if (tem)
return tem;
tem = invert_truthvalue (arg0);
if (TREE_CODE_CLASS (TREE_CODE (tem)) == '<')
{
- tem = fold_cond_expr_with_comparison (type, tem, arg1);
+ tem = fold_cond_expr_with_comparison (type, tem,
+ TREE_OPERAND (t, 2),
+ TREE_OPERAND (t, 1));
if (tem)
return tem;
}
== FUNCTION_DECL)
&& DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
{
- tree tmp = fold_builtin (t);
+ tree tmp = fold_builtin (t, false);
if (tmp)
return tmp;
}
return;
*slot = expr;
code = TREE_CODE (expr);
- if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
- {
- /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
- memcpy (buf, expr, tree_size (expr));
- expr = (tree) buf;
- SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
- }
- else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
+ if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
{
/* Allow DECL_ASSEMBLER_NAME to be modified. */
memcpy (buf, expr, tree_size (expr));
fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
- len = TREE_CODE_LENGTH (code);
switch (TREE_CODE_CLASS (code))
{
case 'c':
}
break;
case 'e':
- switch (code)
- {
- case SAVE_EXPR: len = 2; break;
- case GOTO_SUBROUTINE_EXPR: len = 0; break;
- case RTL_EXPR: len = 0; break;
- case WITH_CLEANUP_EXPR: len = 2; break;
- default: break;
- }
- /* Fall through. */
case 'r':
case '<':
case '1':
case '2':
case 's':
+ len = first_rtl_op (code);
for (i = 0; i < len; ++i)
fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
break;
return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
case FLOAT_EXPR:
return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
- case RTL_EXPR:
- return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
case TARGET_EXPR:
{
return build1 (INDIRECT_REF, type, t);
}
+/* Strip non-trapping, non-side-effecting tree nodes from an expression
+   whose result is ignored.  The type of the returned tree need not be
+   the same as the original expression.  */
+
+tree
+fold_ignored_result (tree t)
+{
+  /* If nothing in T has side effects, the value is never needed at
+     all, so any shared zero node serves as a placeholder.  */
+  if (!TREE_SIDE_EFFECTS (t))
+    return integer_zero_node;
+
+  /* Walk downward, peeling off outer nodes whose own computation is
+     pointless when the result is discarded, until we reach something
+     whose evaluation must be preserved.  */
+  for (;;)
+    switch (TREE_CODE_CLASS (TREE_CODE (t)))
+      {
+      case '1':
+	/* A unary operator wrapping a side-effecting operand: drop the
+	   operator, keep the operand.  */
+	t = TREE_OPERAND (t, 0);
+	break;
+
+      case '2':
+      case '<':
+	/* Binary operators and comparisons: descend into whichever
+	   operand carries the side effects.  If both do, keep the
+	   whole node so both operands are still evaluated.  */
+	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
+	  t = TREE_OPERAND (t, 0);
+	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
+	  t = TREE_OPERAND (t, 1);
+	else
+	  return t;
+	break;
+
+      case 'e':
+	switch (TREE_CODE (t))
+	  {
+	  case COMPOUND_EXPR:
+	    /* (a, b) with a side-effect-free B reduces to just A.  */
+	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
+	      return t;
+	    t = TREE_OPERAND (t, 0);
+	    break;
+
+	  case COND_EXPR:
+	    /* a ? b : c with both arms side-effect free reduces to
+	       evaluating the condition A alone.  */
+	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
+		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
+	      return t;
+	    t = TREE_OPERAND (t, 0);
+	    break;
+
+	  default:
+	    return t;
+	  }
+	break;
+
+      default:
+	/* Anything else (references, declarations, statements, calls)
+	   must be kept intact.  */
+	return t;
+      }
+}
+
#include "gt-fold-const.h"