+ /* If this was a conversion, and all we did was to move into
+ inside the COND_EXPR, bring it back out. But leave it if
+ it is a conversion from integer to integer and the
+ result precision is no wider than a word since such a
+ conversion is cheap and may be optimized away by combine,
+ while it couldn't if it were outside the COND_EXPR. Then return
+ so we don't get into an infinite recursion loop taking the
+ conversion out and then back in. */
+
+ if ((code == NOP_EXPR || code == CONVERT_EXPR
+ || code == NON_LVALUE_EXPR)
+ && TREE_CODE (tem) == COND_EXPR
+ && TREE_CODE (TREE_OPERAND (tem, 1)) == code
+ && TREE_CODE (TREE_OPERAND (tem, 2)) == code
+ && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
+ && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
+ && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
+ == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
+ && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
+ && (INTEGRAL_TYPE_P
+ (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
+ && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
+ || flag_syntax_only))
+ tem = build1 (code, type,
+ build3 (COND_EXPR,
+ TREE_TYPE (TREE_OPERAND
+ (TREE_OPERAND (tem, 1), 0)),
+ TREE_OPERAND (tem, 0),
+ TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
+ TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
+ return tem;
+ }
+ else if (COMPARISON_CLASS_P (arg0))
+ {
+ if (TREE_CODE (type) == BOOLEAN_TYPE)
+ {
+ arg0 = copy_node (arg0);
+ TREE_TYPE (arg0) = type;
+ return arg0;
+ }
+ else if (TREE_CODE (type) != INTEGER_TYPE)
+ return fold_build3 (COND_EXPR, type, arg0,
+ fold_build1 (code, type,
+ integer_one_node),
+ fold_build1 (code, type,
+ integer_zero_node));
+ }
+ }
+
+ switch (code)
+ {
+ case NOP_EXPR:
+ case FLOAT_EXPR:
+ case CONVERT_EXPR:
+ case FIX_TRUNC_EXPR:
+ case FIX_CEIL_EXPR:
+ case FIX_FLOOR_EXPR:
+ case FIX_ROUND_EXPR:
+ if (TREE_TYPE (op0) == type)
+ return op0;
+
+ /* If we have (type) (a CMP b) and type is an integral type, return
+ new expression involving the new type. */
+ if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
+ return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
+ TREE_OPERAND (op0, 1));
+
+ /* Handle cases of two conversions in a row. */
+ if (TREE_CODE (op0) == NOP_EXPR
+ || TREE_CODE (op0) == CONVERT_EXPR)
+ {
+ tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
+ tree inter_type = TREE_TYPE (op0);
+ int inside_int = INTEGRAL_TYPE_P (inside_type);
+ int inside_ptr = POINTER_TYPE_P (inside_type);
+ int inside_float = FLOAT_TYPE_P (inside_type);
+ int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
+ unsigned int inside_prec = TYPE_PRECISION (inside_type);
+ int inside_unsignedp = TYPE_UNSIGNED (inside_type);
+ int inter_int = INTEGRAL_TYPE_P (inter_type);
+ int inter_ptr = POINTER_TYPE_P (inter_type);
+ int inter_float = FLOAT_TYPE_P (inter_type);
+ int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
+ unsigned int inter_prec = TYPE_PRECISION (inter_type);
+ int inter_unsignedp = TYPE_UNSIGNED (inter_type);
+ int final_int = INTEGRAL_TYPE_P (type);
+ int final_ptr = POINTER_TYPE_P (type);
+ int final_float = FLOAT_TYPE_P (type);
+ int final_vec = TREE_CODE (type) == VECTOR_TYPE;
+ unsigned int final_prec = TYPE_PRECISION (type);
+ int final_unsignedp = TYPE_UNSIGNED (type);
+
+ /* In addition to the cases of two conversions in a row
+ handled below, if we are converting something to its own
+ type via an object of identical or wider precision, neither
+ conversion is needed. */
+ if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
+ && ((inter_int && final_int) || (inter_float && final_float))
+ && inter_prec >= final_prec)
+ return fold_build1 (code, type, TREE_OPERAND (op0, 0));
+
+ /* Likewise, if the intermediate and final types are either both
+ float or both integer, we don't need the middle conversion if
+ it is wider than the final type and doesn't change the signedness
+ (for integers). Avoid this if the final type is a pointer
+ since then we sometimes need the inner conversion. Likewise if
+ the outer has a precision not equal to the size of its mode. */
+ if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
+ || (inter_float && inside_float)
+ || (inter_vec && inside_vec))
+ && inter_prec >= inside_prec
+ && (inter_float || inter_vec
+ || inter_unsignedp == inside_unsignedp)
+ && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
+ && TYPE_MODE (type) == TYPE_MODE (inter_type))
+ && ! final_ptr
+ && (! final_vec || inter_prec == inside_prec))
+ return fold_build1 (code, type, TREE_OPERAND (op0, 0));
+
+ /* If we have a sign-extension of a zero-extended value, we can
+ replace that by a single zero-extension. */
+ if (inside_int && inter_int && final_int
+ && inside_prec < inter_prec && inter_prec < final_prec
+ && inside_unsignedp && !inter_unsignedp)
+ return fold_build1 (code, type, TREE_OPERAND (op0, 0));
+
+ /* Two conversions in a row are not needed unless:
+ - some conversion is floating-point (overstrict for now), or
+ - some conversion is a vector (overstrict for now), or
+ - the intermediate type is narrower than both initial and
+ final, or
+ - the intermediate type and innermost type differ in signedness,
+ and the outermost type is wider than the intermediate, or
+ - the initial type is a pointer type and the precisions of the
+ intermediate and final types differ, or
+ - the final type is a pointer type and the precisions of the
+ initial and intermediate types differ. */
+ if (! inside_float && ! inter_float && ! final_float
+ && ! inside_vec && ! inter_vec && ! final_vec
+ && (inter_prec > inside_prec || inter_prec > final_prec)
+ && ! (inside_int && inter_int
+ && inter_unsignedp != inside_unsignedp
+ && inter_prec < final_prec)
+ && ((inter_unsignedp && inter_prec > inside_prec)
+ == (final_unsignedp && final_prec > inter_prec))
+ && ! (inside_ptr && inter_prec != final_prec)
+ && ! (final_ptr && inside_prec != inter_prec)
+ && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
+ && TYPE_MODE (type) == TYPE_MODE (inter_type))
+ && ! final_ptr)
+ return fold_build1 (code, type, TREE_OPERAND (op0, 0));
+ }
+
+ /* Handle (T *)&A.B.C for A being of type T and B and C
+ living at offset zero. This occurs frequently in
+ C++ upcasting and then accessing the base. */
+ if (TREE_CODE (op0) == ADDR_EXPR
+ && POINTER_TYPE_P (type)
+ && handled_component_p (TREE_OPERAND (op0, 0)))
+ {
+ HOST_WIDE_INT bitsize, bitpos;
+ tree offset;
+ enum machine_mode mode;
+ int unsignedp, volatilep;
+ tree base = TREE_OPERAND (op0, 0);
+ base = get_inner_reference (base, &bitsize, &bitpos, &offset,
+ &mode, &unsignedp, &volatilep, false);
+ /* If the reference was to a (constant) zero offset, we can use
+ the address of the base if it has the same base type
+ as the result type. */
+ if (! offset && bitpos == 0
+ && TYPE_MAIN_VARIANT (TREE_TYPE (type))
+ == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
+ return fold_convert (type, build_fold_addr_expr (base));
+ }
+
+ if (TREE_CODE (op0) == MODIFY_EXPR
+ && TREE_CONSTANT (TREE_OPERAND (op0, 1))
+ /* Detect assigning a bitfield. */
+ && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
+ && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
+ {
+ /* Don't leave an assignment inside a conversion
+ unless assigning a bitfield. */
+ tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
+ /* First do the assignment, then return converted constant. */
+ tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
+ TREE_NO_WARNING (tem) = 1;
+ TREE_USED (tem) = 1;
+ return tem;
+ }
+
+ /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
+ constants (if x has signed type, the sign bit cannot be set
+ in c). This folds extension into the BIT_AND_EXPR. */
+ if (INTEGRAL_TYPE_P (type)
+ && TREE_CODE (type) != BOOLEAN_TYPE
+ && TREE_CODE (op0) == BIT_AND_EXPR
+ && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
+ {
+ tree and = op0;
+ tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
+ int change = 0;
+
+ if (TYPE_UNSIGNED (TREE_TYPE (and))
+ || (TYPE_PRECISION (type)
+ <= TYPE_PRECISION (TREE_TYPE (and))))
+ change = 1;
+ else if (TYPE_PRECISION (TREE_TYPE (and1))
+ <= HOST_BITS_PER_WIDE_INT
+ && host_integerp (and1, 1))
+ {
+ unsigned HOST_WIDE_INT cst;
+
+ cst = tree_low_cst (and1, 1);
+ cst &= (HOST_WIDE_INT) -1
+ << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
+ change = (cst == 0);
+#ifdef LOAD_EXTEND_OP
+ if (change
+ && !flag_syntax_only
+ && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
+ == ZERO_EXTEND))
+ {
+ tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
+ and0 = fold_convert (uns, and0);
+ and1 = fold_convert (uns, and1);
+ }
+#endif
+ }
+ if (change)
+ {
+ tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
+ TREE_INT_CST_HIGH (and1));
+ tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
+ TREE_CONSTANT_OVERFLOW (and1));
+ return fold_build2 (BIT_AND_EXPR, type,
+ fold_convert (type, and0), tem);
+ }
+ }
+
+ /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
+ T2 being pointers to types of the same size. */
+ if (POINTER_TYPE_P (type)
+ && BINARY_CLASS_P (arg0)
+ && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
+ && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
+ {
+ tree arg00 = TREE_OPERAND (arg0, 0);
+ tree t0 = type;
+ tree t1 = TREE_TYPE (arg00);
+ tree tt0 = TREE_TYPE (t0);
+ tree tt1 = TREE_TYPE (t1);
+ tree s0 = TYPE_SIZE (tt0);
+ tree s1 = TYPE_SIZE (tt1);
+
+ if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
+ return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
+ TREE_OPERAND (arg0, 1));
+ }
+
+ /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
+ of the same precision, and X is a integer type not narrower than
+ types T1 or T2, i.e. the cast (T2)X isn't an extension. */
+ if (INTEGRAL_TYPE_P (type)
+ && TREE_CODE (op0) == BIT_NOT_EXPR
+ && INTEGRAL_TYPE_P (TREE_TYPE (op0))
+ && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
+ || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
+ && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
+ {
+ tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
+ if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
+ && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
+ return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
+ }
+
+ tem = fold_convert_const (code, type, arg0);
+ return tem ? tem : NULL_TREE;
+
+ case VIEW_CONVERT_EXPR:
+ if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
+ return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
+ return fold_view_convert_expr (type, op0);
+
+ case NEGATE_EXPR:
+ if (negate_expr_p (arg0))
+ return fold_convert (type, negate_expr (arg0));
+ return NULL_TREE;
+
+ case ABS_EXPR:
+ if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
+ return fold_abs_const (arg0, type);
+ else if (TREE_CODE (arg0) == NEGATE_EXPR)
+ return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
+ /* Convert fabs((double)float) into (double)fabsf(float). */
+ else if (TREE_CODE (arg0) == NOP_EXPR
+ && TREE_CODE (type) == REAL_TYPE)
+ {
+ tree targ0 = strip_float_extensions (arg0);
+ if (targ0 != arg0)
+ return fold_convert (type, fold_build1 (ABS_EXPR,
+ TREE_TYPE (targ0),
+ targ0));
+ }
+ /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
+ else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
+ return arg0;
+
+ /* Strip sign ops from argument. */
+ if (TREE_CODE (type) == REAL_TYPE)
+ {
+ tem = fold_strip_sign_ops (arg0);
+ if (tem)
+ return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
+ }
+ return NULL_TREE;
+
+ case CONJ_EXPR:
+ if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
+ return fold_convert (type, arg0);
+ else if (TREE_CODE (arg0) == COMPLEX_EXPR)
+ return build2 (COMPLEX_EXPR, type,
+ TREE_OPERAND (arg0, 0),
+ negate_expr (TREE_OPERAND (arg0, 1)));
+ else if (TREE_CODE (arg0) == COMPLEX_CST)
+ return build_complex (type, TREE_REALPART (arg0),
+ negate_expr (TREE_IMAGPART (arg0)));
+ else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
+ return fold_build2 (TREE_CODE (arg0), type,
+ fold_build1 (CONJ_EXPR, type,
+ TREE_OPERAND (arg0, 0)),
+ fold_build1 (CONJ_EXPR, type,
+ TREE_OPERAND (arg0, 1)));
+ else if (TREE_CODE (arg0) == CONJ_EXPR)
+ return TREE_OPERAND (arg0, 0);
+ return NULL_TREE;
+
+ case BIT_NOT_EXPR:
+ if (TREE_CODE (arg0) == INTEGER_CST)
+ return fold_not_const (arg0, type);
+ else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
+ return TREE_OPERAND (arg0, 0);
+ /* Convert ~ (-A) to A - 1. */
+ else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
+ return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
+ build_int_cst (type, 1));
+ /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
+ else if (INTEGRAL_TYPE_P (type)
+ && ((TREE_CODE (arg0) == MINUS_EXPR
+ && integer_onep (TREE_OPERAND (arg0, 1)))
+ || (TREE_CODE (arg0) == PLUS_EXPR
+ && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
+ return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
+ /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
+ else if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && (tem = fold_unary (BIT_NOT_EXPR, type,
+ fold_convert (type,
+ TREE_OPERAND (arg0, 0)))))
+ return fold_build2 (BIT_XOR_EXPR, type, tem,
+ fold_convert (type, TREE_OPERAND (arg0, 1)));
+ else if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && (tem = fold_unary (BIT_NOT_EXPR, type,
+ fold_convert (type,
+ TREE_OPERAND (arg0, 1)))))
+ return fold_build2 (BIT_XOR_EXPR, type,
+ fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
+
+ return NULL_TREE;
+
+ case TRUTH_NOT_EXPR:
+ /* The argument to invert_truthvalue must have Boolean type. */
+ if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
+ arg0 = fold_convert (boolean_type_node, arg0);
+
+ /* Note that the operand of this must be an int
+ and its values must be 0 or 1.
+ ("true" is a fixed value perhaps depending on the language,
+ but we don't handle values other than 1 correctly yet.) */
+ tem = invert_truthvalue (arg0);
+ /* Avoid infinite recursion. */
+ if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
+ return NULL_TREE;
+ return fold_convert (type, tem);
+
+ case REALPART_EXPR:
+ if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
+ return NULL_TREE;
+ else if (TREE_CODE (arg0) == COMPLEX_EXPR)
+ return omit_one_operand (type, TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg0, 1));
+ else if (TREE_CODE (arg0) == COMPLEX_CST)
+ return TREE_REALPART (arg0);
+ else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
+ return fold_build2 (TREE_CODE (arg0), type,
+ fold_build1 (REALPART_EXPR, type,
+ TREE_OPERAND (arg0, 0)),
+ fold_build1 (REALPART_EXPR, type,
+ TREE_OPERAND (arg0, 1)));
+ return NULL_TREE;
+
+ case IMAGPART_EXPR:
+ if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
+ return fold_convert (type, integer_zero_node);
+ else if (TREE_CODE (arg0) == COMPLEX_EXPR)
+ return omit_one_operand (type, TREE_OPERAND (arg0, 1),
+ TREE_OPERAND (arg0, 0));
+ else if (TREE_CODE (arg0) == COMPLEX_CST)
+ return TREE_IMAGPART (arg0);
+ else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
+ return fold_build2 (TREE_CODE (arg0), type,
+ fold_build1 (IMAGPART_EXPR, type,
+ TREE_OPERAND (arg0, 0)),
+ fold_build1 (IMAGPART_EXPR, type,
+ TREE_OPERAND (arg0, 1)));
+ return NULL_TREE;
+
+ default:
+ return NULL_TREE;
+ } /* switch (code) */
+}
+
+ /* Fold a binary expression of code CODE and type TYPE with operands
+    OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
+    Return the folded expression if folding is successful.  Otherwise,
+    return NULL_TREE.  */
+
+ static tree
+ fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
+ {
+   enum tree_code compl_code;
+
+   /* COMPL_CODE is the complementary extremum: MAX for MIN and vice
+      versa.  Any other CODE is a caller error.  */
+   if (code == MIN_EXPR)
+     compl_code = MAX_EXPR;
+   else if (code == MAX_EXPR)
+     compl_code = MIN_EXPR;
+   else
+     gcc_unreachable ();
+
+   /* MIN (MAX (a, b), b) == b.  */
+   if (TREE_CODE (op0) == compl_code
+       && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
+     return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
+
+   /* MIN (MAX (b, a), b) == b.  The reorder_operands_p check keeps
+      the dropped operand's side effects from being reordered.  */
+   if (TREE_CODE (op0) == compl_code
+       && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
+       && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
+     return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
+
+   /* MIN (a, MAX (a, b)) == a.  */
+   if (TREE_CODE (op1) == compl_code
+       && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
+       && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
+     return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
+
+   /* MIN (a, MAX (b, a)) == a.  */
+   if (TREE_CODE (op1) == compl_code
+       && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
+       && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
+     return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
+
+   return NULL_TREE;
+ }
+
+ /* Subroutine of fold_binary.  This routine performs all of the
+    transformations that are common to the equality/inequality
+    operators (EQ_EXPR and NE_EXPR) and the ordering operators
+    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
+    fold_binary should call fold_binary.  Fold a comparison with
+    tree code CODE and type TYPE with operands OP0 and OP1.  Return
+    the folded comparison or NULL_TREE.  */
+
+ static tree
+ fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
+ {
+   tree arg0, arg1, tem;
+
+   arg0 = op0;
+   arg1 = op1;
+
+   /* Strip only conversions that preserve signedness; the meaning of
+      a comparison is derived from the signedness of its operands.  */
+   STRIP_SIGN_NOPS (arg0);
+   STRIP_SIGN_NOPS (arg1);
+
+   tem = fold_relational_const (code, type, arg0, arg1);
+   if (tem != NULL_TREE)
+     return tem;
+
+   /* If one arg is a real or integer constant, put it last.  */
+   if (tree_swap_operands_p (arg0, arg1, true))
+     return fold_build2 (swap_tree_comparison (code), type, op1, op0);
+
+   /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.
+      Only valid for signed types without -fwrapv/-ftrapv, since it
+      relies on signed overflow being undefined.  */
+   if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
+       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+           && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
+           && !TYPE_UNSIGNED (TREE_TYPE (arg1))
+           && !(flag_wrapv || flag_trapv))
+       && (TREE_CODE (arg1) == INTEGER_CST
+           && !TREE_OVERFLOW (arg1)))
+     {
+       tree const1 = TREE_OPERAND (arg0, 1);
+       tree const2 = arg1;
+       tree variable = TREE_OPERAND (arg0, 0);
+       tree lhs;
+       int lhs_add;
+       lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
+
+       lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
+                          TREE_TYPE (arg1), const2, const1);
+       /* Only use the recombined constant if it did not overflow.  */
+       if (TREE_CODE (lhs) == TREE_CODE (arg1)
+           && (TREE_CODE (lhs) != INTEGER_CST
+               || !TREE_OVERFLOW (lhs)))
+         return fold_build2 (code, type, variable, lhs);
+     }
+
+   if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
+     {
+       tree targ0 = strip_float_extensions (arg0);
+       tree targ1 = strip_float_extensions (arg1);
+       /* NEWTYPE is the wider of the two stripped operand types.  */
+       tree newtype = TREE_TYPE (targ0);
+
+       if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
+         newtype = TREE_TYPE (targ1);
+
+       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
+       if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
+         return fold_build2 (code, type, fold_convert (newtype, targ0),
+                             fold_convert (newtype, targ1));
+
+       /* (-a) CMP (-b) -> b CMP a  */
+       if (TREE_CODE (arg0) == NEGATE_EXPR
+           && TREE_CODE (arg1) == NEGATE_EXPR)
+         return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
+                             TREE_OPERAND (arg0, 0));
+
+       if (TREE_CODE (arg1) == REAL_CST)
+         {
+           REAL_VALUE_TYPE cst;
+           cst = TREE_REAL_CST (arg1);
+
+           /* (-a) CMP CST -> a swap(CMP) (-CST)  */
+           if (TREE_CODE (arg0) == NEGATE_EXPR)
+             return fold_build2 (swap_tree_comparison (code), type,
+                                 TREE_OPERAND (arg0, 0),
+                                 build_real (TREE_TYPE (arg1),
+                                             REAL_VALUE_NEGATE (cst)));
+
+           /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
+           /* a CMP (-0) -> a CMP 0  */
+           if (REAL_VALUE_MINUS_ZERO (cst))
+             return fold_build2 (code, type, arg0,
+                                 build_real (TREE_TYPE (arg1), dconst0));
+
+           /* x != NaN is always true, other ops are always false.
+              Skipped when signaling NaNs are honored, since evaluating
+              the operand could raise an exception.  */
+           if (REAL_VALUE_ISNAN (cst)
+               && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
+             {
+               tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
+               return omit_one_operand (type, tem, arg0);
+             }
+
+           /* Fold comparisons against infinity.  */
+           if (REAL_VALUE_ISINF (cst))
+             {
+               tem = fold_inf_compare (code, type, arg0, arg1);
+               if (tem != NULL_TREE)
+                 return tem;
+             }
+         }
+
+       /* If this is a comparison of a real constant with a PLUS_EXPR
+          or a MINUS_EXPR of a real constant, we can convert it into a
+          comparison with a revised real constant as long as no overflow
+          occurs when unsafe_math_optimizations are enabled.  */
+       if (flag_unsafe_math_optimizations
+           && TREE_CODE (arg1) == REAL_CST
+           && (TREE_CODE (arg0) == PLUS_EXPR
+               || TREE_CODE (arg0) == MINUS_EXPR)
+           && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
+           && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
+                                       ? MINUS_EXPR : PLUS_EXPR,
+                                       arg1, TREE_OPERAND (arg0, 1), 0))
+           && ! TREE_CONSTANT_OVERFLOW (tem))
+         return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
+
+       /* Likewise, we can simplify a comparison of a real constant with
+          a MINUS_EXPR whose first operand is also a real constant, i.e.
+          (c1 - x) < c2 becomes x > c1-c2.  */
+       if (flag_unsafe_math_optimizations
+           && TREE_CODE (arg1) == REAL_CST
+           && TREE_CODE (arg0) == MINUS_EXPR
+           && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
+           && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
+                                       arg1, 0))
+           && ! TREE_CONSTANT_OVERFLOW (tem))
+         return fold_build2 (swap_tree_comparison (code), type,
+                             TREE_OPERAND (arg0, 1), tem);
+
+       /* Fold comparisons against built-in math functions.  */
+       if (TREE_CODE (arg1) == REAL_CST
+           && flag_unsafe_math_optimizations
+           && ! flag_errno_math)
+         {
+           enum built_in_function fcode = builtin_mathfn_code (arg0);
+
+           if (fcode != END_BUILTINS)
+             {
+               tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
+               if (tem != NULL_TREE)
+                 return tem;
+             }
+         }
+     }
+
+   /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
+   if (TREE_CONSTANT (arg1)
+       && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
+           || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
+       /* This optimization is invalid for ordered comparisons
+          if CONST+INCR overflows or if foo+incr might overflow.
+          This optimization is invalid for floating point due to rounding.
+          For pointer types we assume overflow doesn't happen.  */
+       && (POINTER_TYPE_P (TREE_TYPE (arg0))
+           || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
+               && (code == EQ_EXPR || code == NE_EXPR))))
+     {
+       tree varop, newconst;
+
+       if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
+         {
+           newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
+                                   arg1, TREE_OPERAND (arg0, 1));
+           varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
+                           TREE_OPERAND (arg0, 0),
+                           TREE_OPERAND (arg0, 1));
+         }
+       else
+         {
+           newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
+                                   arg1, TREE_OPERAND (arg0, 1));
+           varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
+                           TREE_OPERAND (arg0, 0),
+                           TREE_OPERAND (arg0, 1));
+         }
+
+
+       /* If VAROP is a reference to a bitfield, we must mask
+          the constant by the width of the field.  */
+       if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
+           && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
+           && host_integerp (DECL_SIZE (TREE_OPERAND
+                                        (TREE_OPERAND (varop, 0), 1)), 1))
+         {
+           tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
+           HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
+           tree folded_compare, shift;
+
+           /* First check whether the comparison would come out
+              always the same.  If we don't do that we would
+              change the meaning with the masking.  */
+           folded_compare = fold_build2 (code, type,
+                                         TREE_OPERAND (varop, 0), arg1);
+           if (TREE_CODE (folded_compare) == INTEGER_CST)
+             return omit_one_operand (type, folded_compare, varop);
+
+           /* Mask NEWCONST to the field's width by shifting it up to
+              the top of the type and back down again.  */
+           shift = build_int_cst (NULL_TREE,
+                                  TYPE_PRECISION (TREE_TYPE (varop)) - size);
+           shift = fold_convert (TREE_TYPE (varop), shift);
+           newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
+                                   newconst, shift);
+           newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
+                                   newconst, shift);
+         }
+
+       return fold_build2 (code, type, varop, newconst);
+     }
+
+   if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
+       && (TREE_CODE (arg0) == NOP_EXPR
+           || TREE_CODE (arg0) == CONVERT_EXPR))
+     {
+       /* If we are widening one operand of an integer comparison,
+          see if the other operand is similarly being widened.  Perhaps we
+          can do the comparison in the narrower type.  */
+       tem = fold_widened_comparison (code, type, arg0, arg1);
+       if (tem)
+         return tem;
+
+       /* Or if we are changing signedness.  */
+       tem = fold_sign_changed_comparison (code, type, arg0, arg1);
+       if (tem)
+         return tem;
+     }
+
+   /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
+      constant, we can simplify it.  */
+   if (TREE_CODE (arg1) == INTEGER_CST
+       && (TREE_CODE (arg0) == MIN_EXPR
+           || TREE_CODE (arg0) == MAX_EXPR)
+       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
+     {
+       tem = optimize_minmax_comparison (code, type, op0, op1);
+       if (tem)
+         return tem;
+     }
+
+   /* Simplify comparison of something with itself.  (For IEEE
+      floating-point, we can only do some of these simplifications,
+      because a NaN compares unordered with everything, itself
+      included.)  */
+   if (operand_equal_p (arg0, arg1, 0))
+     {
+       switch (code)
+         {
+         case EQ_EXPR:
+           if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
+               || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
+             return constant_boolean_node (1, type);
+           break;
+
+         case GE_EXPR:
+         case LE_EXPR:
+           if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
+               || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
+             return constant_boolean_node (1, type);
+           return fold_build2 (EQ_EXPR, type, arg0, arg1);
+
+         case NE_EXPR:
+           /* For NE, we can only do this simplification if integer
+              or we don't honor IEEE floating point NaNs.  */
+           if (FLOAT_TYPE_P (TREE_TYPE (arg0))
+               && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
+             break;
+           /* ... fall through ...  */
+         case GT_EXPR:
+         case LT_EXPR:
+           return constant_boolean_node (0, type);
+         default:
+           gcc_unreachable ();
+         }
+     }
+
+   /* If we are comparing an expression that just has comparisons
+      of two integer values, arithmetic expressions of those comparisons,
+      and constants, we can simplify it.  There are only three cases
+      to check: the two values can either be equal, the first can be
+      greater, or the second can be greater.  Fold the expression for
+      those three values.  Since each value must be 0 or 1, we have
+      eight possibilities, each of which corresponds to the constant 0
+      or 1 or one of the six possible comparisons.
+
+      This handles common cases like (a > b) == 0 but also handles
+      expressions like ((x > y) - (y > x)) > 0, which supposedly
+      occur in macroized code.  */
+
+   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
+     {
+       tree cval1 = 0, cval2 = 0;
+       int save_p = 0;
+
+       if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
+           /* Don't handle degenerate cases here; they should already
+              have been handled anyway.  */
+           && cval1 != 0 && cval2 != 0
+           && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
+           && TREE_TYPE (cval1) == TREE_TYPE (cval2)
+           && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
+           && TYPE_MAX_VALUE (TREE_TYPE (cval1))
+           && TYPE_MAX_VALUE (TREE_TYPE (cval2))
+           && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
+                                 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
+         {
+           tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
+           tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
+
+           /* We can't just pass T to eval_subst in case cval1 or cval2
+              was the same as ARG1.  */
+
+           tree high_result
+             = fold_build2 (code, type,
+                            eval_subst (arg0, cval1, maxval,
+                                        cval2, minval),
+                            arg1);
+           tree equal_result
+             = fold_build2 (code, type,
+                            eval_subst (arg0, cval1, maxval,
+                                        cval2, maxval),
+                            arg1);
+           tree low_result
+             = fold_build2 (code, type,
+                            eval_subst (arg0, cval1, minval,
+                                        cval2, maxval),
+                            arg1);
+
+           /* All three of these results should be 0 or 1.  Confirm they are.
+              Then use those values to select the proper code to use.  */
+
+           if (TREE_CODE (high_result) == INTEGER_CST
+               && TREE_CODE (equal_result) == INTEGER_CST
+               && TREE_CODE (low_result) == INTEGER_CST)
+             {
+               /* Make a 3-bit mask with the high-order bit being the
+                  value for `>', the next for '=', and the low for '<'.  */
+               switch ((integer_onep (high_result) * 4)
+                       + (integer_onep (equal_result) * 2)
+                       + integer_onep (low_result))
+                 {
+                 case 0:
+                   /* Always false.  */
+                   return omit_one_operand (type, integer_zero_node, arg0);
+                 case 1:
+                   code = LT_EXPR;
+                   break;
+                 case 2:
+                   code = EQ_EXPR;
+                   break;
+                 case 3:
+                   code = LE_EXPR;
+                   break;
+                 case 4:
+                   code = GT_EXPR;
+                   break;
+                 case 5:
+                   code = NE_EXPR;
+                   break;
+                 case 6:
+                   code = GE_EXPR;
+                   break;
+                 case 7:
+                   /* Always true.  */
+                   return omit_one_operand (type, integer_one_node, arg0);
+                 }
+
+               /* SAVE_P was set by twoval_comparison_p when a value is
+                  used more than once; wrap in SAVE_EXPR in that case.  */
+               if (save_p)
+                 return save_expr (build2 (code, type, cval1, cval2));
+               return fold_build2 (code, type, cval1, cval2);
+             }
+         }
+     }
+
+   /* Fold a comparison of the address of COMPONENT_REFs with the same
+      type and component to a comparison of the address of the base
+      object.  In short, &x->a OP &y->a to x OP y and
+      &x->a OP &y.a to x OP &y  */
+   if (TREE_CODE (arg0) == ADDR_EXPR
+       && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
+       && TREE_CODE (arg1) == ADDR_EXPR
+       && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
+     {
+       tree cref0 = TREE_OPERAND (arg0, 0);
+       tree cref1 = TREE_OPERAND (arg1, 0);
+       /* Operand 1 of a COMPONENT_REF is the FIELD_DECL; pointer
+          equality here means the very same field.  */
+       if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
+         {
+           /* NOTE: these locals shadow the function parameters OP0/OP1;
+              the parameters are not used past this point.  */
+           tree op0 = TREE_OPERAND (cref0, 0);
+           tree op1 = TREE_OPERAND (cref1, 0);
+           return fold_build2 (code, type,
+                               build_fold_addr_expr (op0),
+                               build_fold_addr_expr (op1));
+         }
+     }
+
+   /* We can fold X/C1 op C2 where C1 and C2 are integer constants
+      into a single range test.  */
+   if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
+        || TREE_CODE (arg0) == EXACT_DIV_EXPR)
+       && TREE_CODE (arg1) == INTEGER_CST
+       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+       && !integer_zerop (TREE_OPERAND (arg0, 1))
+       && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
+       && !TREE_OVERFLOW (arg1))
+     {
+       tem = fold_div_compare (code, type, arg0, arg1);
+       if (tem != NULL_TREE)
+         return tem;
+     }
+
+   return NULL_TREE;
+ }
+
+/* Fold a binary expression of code CODE and type TYPE with operands
+ OP0 and OP1. Return the folded expression if folding is
+ successful. Otherwise, return NULL_TREE. */
+
+tree
+fold_binary (enum tree_code code, tree type, tree op0, tree op1)
+{
+ enum tree_code_class kind = TREE_CODE_CLASS (code);
+ tree arg0, arg1, tem;
+ tree t1 = NULL_TREE;
+
+ gcc_assert (IS_EXPR_CODE_CLASS (kind)
+ && TREE_CODE_LENGTH (code) == 2
+ && op0 != NULL_TREE
+ && op1 != NULL_TREE);
+
+ arg0 = op0;
+ arg1 = op1;
+
+ /* Strip any conversions that don't change the mode. This is
+ safe for every expression, except for a comparison expression
+ because its signedness is derived from its operands. So, in
+ the latter case, only strip conversions that don't change the
+ signedness.
+
+ Note that this is done as an internal manipulation within the
+ constant folder, in order to find the simplest representation
+ of the arguments so that their form can be studied. In any
+ cases, the appropriate type conversions should be put back in
+ the tree that will get out of the constant folder. */
+
+ if (kind == tcc_comparison)
+ {
+ STRIP_SIGN_NOPS (arg0);
+ STRIP_SIGN_NOPS (arg1);
+ }
+ else
+ {
+ STRIP_NOPS (arg0);
+ STRIP_NOPS (arg1);
+ }
+
+ /* Note that TREE_CONSTANT isn't enough: static var addresses are
+ constant but we can't do arithmetic on them. */
+ if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
+ || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
+ || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
+ || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
+ {
+ if (kind == tcc_binary)
+ tem = const_binop (code, arg0, arg1, 0);
+ else if (kind == tcc_comparison)
+ tem = fold_relational_const (code, type, arg0, arg1);
+ else
+ tem = NULL_TREE;
+
+ if (tem != NULL_TREE)
+ {
+ if (TREE_TYPE (tem) != type)
+ tem = fold_convert (type, tem);
+ return tem;
+ }
+ }
+
+ /* If this is a commutative operation, and ARG0 is a constant, move it
+ to ARG1 to reduce the number of tests below. */
+ if (commutative_tree_code (code)
+ && tree_swap_operands_p (arg0, arg1, true))
+ return fold_build2 (code, type, op1, op0);
+
+ /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
+
+ First check for cases where an arithmetic operation is applied to a
+ compound, conditional, or comparison operation. Push the arithmetic
+ operation inside the compound or conditional to see if any folding
+ can then be done. Convert comparison to conditional for this purpose.
+ The also optimizes non-constant cases that used to be done in
+ expand_expr.
+
+ Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
+ one of the operands is a comparison and the other is a comparison, a
+ BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
+ code below would make the expression more complex. Change it to a
+ TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
+ TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
+
+ if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
+ || code == EQ_EXPR || code == NE_EXPR)
+ && ((truth_value_p (TREE_CODE (arg0))
+ && (truth_value_p (TREE_CODE (arg1))
+ || (TREE_CODE (arg1) == BIT_AND_EXPR
+ && integer_onep (TREE_OPERAND (arg1, 1)))))
+ || (truth_value_p (TREE_CODE (arg1))
+ && (truth_value_p (TREE_CODE (arg0))
+ || (TREE_CODE (arg0) == BIT_AND_EXPR
+ && integer_onep (TREE_OPERAND (arg0, 1)))))))
+ {
+ tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
+ : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
+ : TRUTH_XOR_EXPR,
+ boolean_type_node,
+ fold_convert (boolean_type_node, arg0),
+ fold_convert (boolean_type_node, arg1));
+
+ if (code == EQ_EXPR)
+ tem = invert_truthvalue (tem);
+
+ return fold_convert (type, tem);
+ }
+
+ if (TREE_CODE_CLASS (code) == tcc_binary
+ || TREE_CODE_CLASS (code) == tcc_comparison)
+ {
+ if (TREE_CODE (arg0) == COMPOUND_EXPR)
+ return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
+ fold_build2 (code, type,
+ TREE_OPERAND (arg0, 1), op1));
+ if (TREE_CODE (arg1) == COMPOUND_EXPR
+ && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
+ return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
+ fold_build2 (code, type,
+ op0, TREE_OPERAND (arg1, 1)));
+
+ if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
+ {
+ tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
+ arg0, arg1,
+ /*cond_first_p=*/1);
+ if (tem != NULL_TREE)
+ return tem;
+ }
+
+ if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
+ {
+ tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
+ arg1, arg0,
+ /*cond_first_p=*/0);
+ if (tem != NULL_TREE)
+ return tem;
+ }
+ }
+
+ switch (code)
+ {
+ case PLUS_EXPR:
+ /* A + (-B) -> A - B */
+ if (TREE_CODE (arg1) == NEGATE_EXPR)
+ return fold_build2 (MINUS_EXPR, type,
+ fold_convert (type, arg0),
+ fold_convert (type, TREE_OPERAND (arg1, 0)));
+ /* (-A) + B -> B - A */
+ if (TREE_CODE (arg0) == NEGATE_EXPR
+ && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
+ return fold_build2 (MINUS_EXPR, type,
+ fold_convert (type, arg1),
+ fold_convert (type, TREE_OPERAND (arg0, 0)));
+ /* Convert ~A + 1 to -A. */
+ if (INTEGRAL_TYPE_P (type)
+ && TREE_CODE (arg0) == BIT_NOT_EXPR
+ && integer_onep (arg1))
+ return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
+
+ /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
+ same or one. */
+ if ((TREE_CODE (arg0) == MULT_EXPR
+ || TREE_CODE (arg1) == MULT_EXPR)
+ && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
+ {
+ tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
+ if (tem)
+ return tem;
+ }
+
+ if (! FLOAT_TYPE_P (type))
+ {
+ if (integer_zerop (arg1))
+ return non_lvalue (fold_convert (type, arg0));
+
+ /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
+ with a constant, and the two constants have no bits in common,
+ we should treat this as a BIT_IOR_EXPR since this may produce more