1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide and size_binop.
32 fold takes a tree as argument and returns a simplified tree.
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'.
41 Note: Since the folders get called on non-gimple code as well as
42 gimple code, we need to handle GIMPLE tuples as well as their
43 corresponding tree equivalents. */
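/* Illustrative sketch (an editor's example, not part of the original
   sources): a typical middle-end caller combines size expressions with
   the entry points documented above, e.g.

     tree off = size_binop (PLUS_EXPR, size_int (4),
                            TYPE_SIZE_UNIT (TREE_TYPE (field)));

   where `field' is a hypothetical FIELD_DECL; when both operands are
   INTEGER_CSTs the result folds to a constant of type `sizetype'.  */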
47 #include "coretypes.h"
60 #include "langhooks.h"
63 #include "tree-flow.h"
65 /* Nonzero if we are folding constants inside an initializer; zero otherwise.  */
67 int folding_initializer = 0;
69 /* The following constants represent a bit based encoding of GCC's
70 comparison operators. This encoding simplifies transformations
71 on relational comparison operators, such as AND and OR. */
72 enum comparison_code {
91 static bool negate_mathfn_p (enum built_in_function);
92 static bool negate_expr_p (tree);
93 static tree negate_expr (tree);
94 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
95 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
96 static tree const_binop (enum tree_code, tree, tree);
97 static enum comparison_code comparison_to_compcode (enum tree_code);
98 static enum tree_code compcode_to_comparison (enum comparison_code);
99 static int operand_equal_for_comparison_p (tree, tree, tree);
100 static int twoval_comparison_p (tree, tree *, tree *, int *);
101 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
102 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
103 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
104 static tree make_bit_field_ref (location_t, tree, tree,
105 HOST_WIDE_INT, HOST_WIDE_INT, int);
106 static tree optimize_bit_field_compare (location_t, enum tree_code,
108 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
110 enum machine_mode *, int *, int *,
112 static int all_ones_mask_p (const_tree, int);
113 static tree sign_bit_p (tree, const_tree);
114 static int simple_operand_p (const_tree);
115 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
116 static tree range_predecessor (tree);
117 static tree range_successor (tree);
118 extern tree make_range (tree, int *, tree *, tree *, bool *);
119 extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
121 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
122 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
123 static tree unextend (tree, int, int, tree);
124 static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
125 static tree optimize_minmax_comparison (location_t, enum tree_code,
127 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
128 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
129 static tree fold_binary_op_with_conditional_arg (location_t,
130 enum tree_code, tree,
133 static tree fold_mathfn_compare (location_t,
134 enum built_in_function, enum tree_code,
136 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
137 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
138 static bool reorder_operands_p (const_tree, const_tree);
139 static tree fold_negate_const (tree, tree);
140 static tree fold_not_const (const_tree, tree);
141 static tree fold_relational_const (enum tree_code, tree, tree, tree);
142 static tree fold_convert_const (enum tree_code, tree, tree);
145 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
146 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
147 and SUM1.  Then this yields nonzero if overflow occurred during the addition.
150 Overflow occurs if A and B have the same sign, but A and SUM differ in
151 sign.  Use `^' to test whether signs differ, and `< 0' to isolate the sign bit.  */
153 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
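/* Illustrative worked example (editor's addition, not from the original
   sources): with 8-bit quantities, 0x60 + 0x50 = 0xB0.  The operands agree
   in sign (both non-negative) while the sum is negative, so ~(a ^ b) has
   the sign bit set and so does (a ^ sum); their AND is negative and the
   macro yields nonzero.  Conversely, when a and b differ in sign the sum
   can never overflow, and ~(a ^ b) clears the sign bit, so the macro
   yields zero.  */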
155 /* If ARG2 divides ARG1 with zero remainder, carries out the division
156 of type CODE and returns the quotient.
157 Otherwise returns NULL_TREE. */
160 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
165 /* The sign of the division is taken from operand two, which
166 does the correct thing for POINTER_PLUS_EXPR where we want
167 a signed division. */
168 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
169 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
170 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
173 quo = double_int_divmod (tree_to_double_int (arg1),
174 tree_to_double_int (arg2),
177 if (double_int_zero_p (rem))
178 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
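/* Illustrative sketch (editor's example, not from the original sources):
   for INTEGER_CST operands, conceptually

     div_if_zero_remainder (TRUNC_DIV_EXPR, build_int_cst (sizetype, 12),
                            build_int_cst (sizetype, 4))

   yields the constant 3, while a dividend of 13 yields NULL_TREE because
   the remainder is nonzero.  */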
183 /* This is nonzero if we should defer warnings about undefined
184 overflow. This facility exists because these warnings are a
185 special case. The code to estimate loop iterations does not want
186 to issue any warnings, since it works with expressions which do not
187 occur in user code. Various bits of cleanup code call fold(), but
188 only use the result if it has certain characteristics (e.g., is a
189 constant); that code only wants to issue a warning if the result is used.  */
192 static int fold_deferring_overflow_warnings;
194 /* If a warning about undefined overflow is deferred, this is the
195 warning. Note that this may cause us to turn two warnings into
196 one, but that is fine since it is sufficient to only give one
197 warning per expression. */
199 static const char* fold_deferred_overflow_warning;
201 /* If a warning about undefined overflow is deferred, this is the
202 level at which the warning should be emitted. */
204 static enum warn_strict_overflow_code fold_deferred_overflow_code;
206 /* Start deferring overflow warnings. We could use a stack here to
207 permit nested calls, but at present it is not necessary. */
210 fold_defer_overflow_warnings (void)
212 ++fold_deferring_overflow_warnings;
215 /* Stop deferring overflow warnings. If there is a pending warning,
216 and ISSUE is true, then issue the warning if appropriate. STMT is
217 the statement with which the warning should be associated (used for
218 location information); STMT may be NULL. CODE is the level of the
219 warning--a warn_strict_overflow_code value. This function will use
220 the smaller of CODE and the deferred code when deciding whether to
221 issue the warning.  CODE may be zero to mean to always use the deferred code.  */
225 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
230 gcc_assert (fold_deferring_overflow_warnings > 0);
231 --fold_deferring_overflow_warnings;
232 if (fold_deferring_overflow_warnings > 0)
234 if (fold_deferred_overflow_warning != NULL
236 && code < (int) fold_deferred_overflow_code)
237 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
241 warnmsg = fold_deferred_overflow_warning;
242 fold_deferred_overflow_warning = NULL;
244 if (!issue || warnmsg == NULL)
247 if (gimple_no_warning_p (stmt))
250 /* Use the smallest code level when deciding to issue the warning.  */
252 if (code == 0 || code > (int) fold_deferred_overflow_code)
253 code = fold_deferred_overflow_code;
255 if (!issue_strict_overflow_warning (code))
259 locus = input_location;
261 locus = gimple_location (stmt);
262 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
265 /* Stop deferring overflow warnings, ignoring any deferred warnings.  */
269 fold_undefer_and_ignore_overflow_warnings (void)
271 fold_undefer_overflow_warnings (false, NULL, 0);
274 /* Whether we are deferring overflow warnings. */
277 fold_deferring_overflow_warnings_p (void)
279 return fold_deferring_overflow_warnings > 0;
282 /* This is called when we fold something based on the fact that signed
283 overflow is undefined. */
286 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
288 if (fold_deferring_overflow_warnings > 0)
290 if (fold_deferred_overflow_warning == NULL
291 || wc < fold_deferred_overflow_code)
293 fold_deferred_overflow_warning = gmsgid;
294 fold_deferred_overflow_code = wc;
297 else if (issue_strict_overflow_warning (wc))
298 warning (OPT_Wstrict_overflow, gmsgid);
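/* Illustrative sketch (editor's example, not from the original sources)
   of how a caller might use the deferral machinery above; the real call
   sites live elsewhere (e.g. the loop-iteration estimator).  The operands
   a, b, type and the statement stmt are hypothetical:

     fold_defer_overflow_warnings ();
     t = fold_binary (PLUS_EXPR, type, a, b);
     fold_undefer_overflow_warnings (t != NULL_TREE, stmt,
                                     WARN_STRICT_OVERFLOW_MISC);

   Any warning recorded by fold_overflow_warning while folding is then
   emitted only when the caller decides the result is actually used.  */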
301 /* Return true if the built-in mathematical function specified by CODE
302 is odd, i.e. -f(x) == f(-x). */
305 negate_mathfn_p (enum built_in_function code)
309 CASE_FLT_FN (BUILT_IN_ASIN):
310 CASE_FLT_FN (BUILT_IN_ASINH):
311 CASE_FLT_FN (BUILT_IN_ATAN):
312 CASE_FLT_FN (BUILT_IN_ATANH):
313 CASE_FLT_FN (BUILT_IN_CASIN):
314 CASE_FLT_FN (BUILT_IN_CASINH):
315 CASE_FLT_FN (BUILT_IN_CATAN):
316 CASE_FLT_FN (BUILT_IN_CATANH):
317 CASE_FLT_FN (BUILT_IN_CBRT):
318 CASE_FLT_FN (BUILT_IN_CPROJ):
319 CASE_FLT_FN (BUILT_IN_CSIN):
320 CASE_FLT_FN (BUILT_IN_CSINH):
321 CASE_FLT_FN (BUILT_IN_CTAN):
322 CASE_FLT_FN (BUILT_IN_CTANH):
323 CASE_FLT_FN (BUILT_IN_ERF):
324 CASE_FLT_FN (BUILT_IN_LLROUND):
325 CASE_FLT_FN (BUILT_IN_LROUND):
326 CASE_FLT_FN (BUILT_IN_ROUND):
327 CASE_FLT_FN (BUILT_IN_SIN):
328 CASE_FLT_FN (BUILT_IN_SINH):
329 CASE_FLT_FN (BUILT_IN_TAN):
330 CASE_FLT_FN (BUILT_IN_TANH):
331 CASE_FLT_FN (BUILT_IN_TRUNC):
334 CASE_FLT_FN (BUILT_IN_LLRINT):
335 CASE_FLT_FN (BUILT_IN_LRINT):
336 CASE_FLT_FN (BUILT_IN_NEARBYINT):
337 CASE_FLT_FN (BUILT_IN_RINT):
338 return !flag_rounding_math;
346 /* Check whether we may negate an integer constant T without causing overflow.  */
350 may_negate_without_overflow_p (const_tree t)
352 unsigned HOST_WIDE_INT val;
356 gcc_assert (TREE_CODE (t) == INTEGER_CST);
358 type = TREE_TYPE (t);
359 if (TYPE_UNSIGNED (type))
362 prec = TYPE_PRECISION (type);
363 if (prec > HOST_BITS_PER_WIDE_INT)
365 if (TREE_INT_CST_LOW (t) != 0)
367 prec -= HOST_BITS_PER_WIDE_INT;
368 val = TREE_INT_CST_HIGH (t);
371 val = TREE_INT_CST_LOW (t);
372 if (prec < HOST_BITS_PER_WIDE_INT)
373 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
374 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
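/* Illustrative example (editor's addition, not from the original sources):
   for a 32-bit signed type the only constant this rejects is 0x80000000
   (INT_MIN), since val then equals 1 << (prec - 1) and -INT_MIN is not
   representable; every other value, including INT_MAX, may be negated
   safely.  */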
377 /* Determine whether an expression T can be cheaply negated using
378 the function negate_expr without introducing undefined overflow. */
381 negate_expr_p (tree t)
388 type = TREE_TYPE (t);
391 switch (TREE_CODE (t))
394 if (TYPE_OVERFLOW_WRAPS (type))
397 /* Check that -CST will not overflow type. */
398 return may_negate_without_overflow_p (t);
400 return (INTEGRAL_TYPE_P (type)
401 && TYPE_OVERFLOW_WRAPS (type));
408 /* We want to canonicalize to positive real constants. Pretend
409 that only negative ones can be easily negated. */
410 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
413 return negate_expr_p (TREE_REALPART (t))
414 && negate_expr_p (TREE_IMAGPART (t));
417 return negate_expr_p (TREE_OPERAND (t, 0))
418 && negate_expr_p (TREE_OPERAND (t, 1));
421 return negate_expr_p (TREE_OPERAND (t, 0));
424 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
425 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
427 /* -(A + B) -> (-B) - A. */
428 if (negate_expr_p (TREE_OPERAND (t, 1))
429 && reorder_operands_p (TREE_OPERAND (t, 0),
430 TREE_OPERAND (t, 1)))
432 /* -(A + B) -> (-A) - B. */
433 return negate_expr_p (TREE_OPERAND (t, 0));
436 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
437 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
438 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
439 && reorder_operands_p (TREE_OPERAND (t, 0),
440 TREE_OPERAND (t, 1));
443 if (TYPE_UNSIGNED (TREE_TYPE (t)))
449 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
450 return negate_expr_p (TREE_OPERAND (t, 1))
451 || negate_expr_p (TREE_OPERAND (t, 0));
459 /* In general we can't negate A / B, because if A is INT_MIN and
460 B is 1, we may turn this into INT_MIN / -1 which is undefined
461 and actually traps on some architectures. But if overflow is
462 undefined, we can negate, because - (INT_MIN / 1) is an overflow.  */
464 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
465 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
467 return negate_expr_p (TREE_OPERAND (t, 1))
468 || negate_expr_p (TREE_OPERAND (t, 0));
471 /* Negate -((double)float) as (double)(-float). */
472 if (TREE_CODE (type) == REAL_TYPE)
474 tree tem = strip_float_extensions (t);
476 return negate_expr_p (tem);
481 /* Negate -f(x) as f(-x). */
482 if (negate_mathfn_p (builtin_mathfn_code (t)))
483 return negate_expr_p (CALL_EXPR_ARG (t, 0));
487 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
488 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
490 tree op1 = TREE_OPERAND (t, 1);
491 if (TREE_INT_CST_HIGH (op1) == 0
492 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
493 == TREE_INT_CST_LOW (op1))
504 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
505 simplification is possible.
506 If negate_expr_p would return true for T, NULL_TREE will never be returned.  */
510 fold_negate_expr (location_t loc, tree t)
512 tree type = TREE_TYPE (t);
515 switch (TREE_CODE (t))
517 /* Convert - (~A) to A + 1. */
519 if (INTEGRAL_TYPE_P (type))
520 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
521 build_int_cst (type, 1));
525 tem = fold_negate_const (t, type);
526 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
527 || !TYPE_OVERFLOW_TRAPS (type))
532 tem = fold_negate_const (t, type);
533 /* Two's complement FP formats, such as c4x, may overflow. */
534 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
539 tem = fold_negate_const (t, type);
544 tree rpart = negate_expr (TREE_REALPART (t));
545 tree ipart = negate_expr (TREE_IMAGPART (t));
547 if ((TREE_CODE (rpart) == REAL_CST
548 && TREE_CODE (ipart) == REAL_CST)
549 || (TREE_CODE (rpart) == INTEGER_CST
550 && TREE_CODE (ipart) == INTEGER_CST))
551 return build_complex (type, rpart, ipart);
556 if (negate_expr_p (t))
557 return fold_build2_loc (loc, COMPLEX_EXPR, type,
558 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
559 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
563 if (negate_expr_p (t))
564 return fold_build1_loc (loc, CONJ_EXPR, type,
565 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
569 return TREE_OPERAND (t, 0);
572 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
573 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
575 /* -(A + B) -> (-B) - A. */
576 if (negate_expr_p (TREE_OPERAND (t, 1))
577 && reorder_operands_p (TREE_OPERAND (t, 0),
578 TREE_OPERAND (t, 1)))
580 tem = negate_expr (TREE_OPERAND (t, 1));
581 return fold_build2_loc (loc, MINUS_EXPR, type,
582 tem, TREE_OPERAND (t, 0));
585 /* -(A + B) -> (-A) - B. */
586 if (negate_expr_p (TREE_OPERAND (t, 0)))
588 tem = negate_expr (TREE_OPERAND (t, 0));
589 return fold_build2_loc (loc, MINUS_EXPR, type,
590 tem, TREE_OPERAND (t, 1));
596 /* - (A - B) -> B - A */
597 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
598 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
599 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
600 return fold_build2_loc (loc, MINUS_EXPR, type,
601 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
605 if (TYPE_UNSIGNED (type))
611 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
613 tem = TREE_OPERAND (t, 1);
614 if (negate_expr_p (tem))
615 return fold_build2_loc (loc, TREE_CODE (t), type,
616 TREE_OPERAND (t, 0), negate_expr (tem));
617 tem = TREE_OPERAND (t, 0);
618 if (negate_expr_p (tem))
619 return fold_build2_loc (loc, TREE_CODE (t), type,
620 negate_expr (tem), TREE_OPERAND (t, 1));
629 /* In general we can't negate A / B, because if A is INT_MIN and
630 B is 1, we may turn this into INT_MIN / -1 which is undefined
631 and actually traps on some architectures. But if overflow is
632 undefined, we can negate, because - (INT_MIN / 1) is an overflow.  */
634 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
636 const char * const warnmsg = G_("assuming signed overflow does not "
637 "occur when negating a division");
638 tem = TREE_OPERAND (t, 1);
639 if (negate_expr_p (tem))
641 if (INTEGRAL_TYPE_P (type)
642 && (TREE_CODE (tem) != INTEGER_CST
643 || integer_onep (tem)))
644 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
645 return fold_build2_loc (loc, TREE_CODE (t), type,
646 TREE_OPERAND (t, 0), negate_expr (tem));
648 tem = TREE_OPERAND (t, 0);
649 if (negate_expr_p (tem))
651 if (INTEGRAL_TYPE_P (type)
652 && (TREE_CODE (tem) != INTEGER_CST
653 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
654 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
655 return fold_build2_loc (loc, TREE_CODE (t), type,
656 negate_expr (tem), TREE_OPERAND (t, 1));
662 /* Convert -((double)float) into (double)(-float). */
663 if (TREE_CODE (type) == REAL_TYPE)
665 tem = strip_float_extensions (t);
666 if (tem != t && negate_expr_p (tem))
667 return fold_convert_loc (loc, type, negate_expr (tem));
672 /* Negate -f(x) as f(-x). */
673 if (negate_mathfn_p (builtin_mathfn_code (t))
674 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
678 fndecl = get_callee_fndecl (t);
679 arg = negate_expr (CALL_EXPR_ARG (t, 0));
680 return build_call_expr_loc (loc, fndecl, 1, arg);
685 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
686 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
688 tree op1 = TREE_OPERAND (t, 1);
689 if (TREE_INT_CST_HIGH (op1) == 0
690 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
691 == TREE_INT_CST_LOW (op1))
693 tree ntype = TYPE_UNSIGNED (type)
694 ? signed_type_for (type)
695 : unsigned_type_for (type);
696 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
697 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
698 return fold_convert_loc (loc, type, temp);
710 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
711 negated in a simpler way.  Also allow for T to be NULL_TREE, in which case return NULL_TREE.  */
723 loc = EXPR_LOCATION (t);
724 type = TREE_TYPE (t);
727 tem = fold_negate_expr (loc, t);
730 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
731 SET_EXPR_LOCATION (tem, loc);
733 return fold_convert_loc (loc, type, tem);
736 /* Split a tree IN into constant, literal and variable parts that could be
737 combined with CODE to make IN. "constant" means an expression with
738 TREE_CONSTANT but that isn't an actual constant. CODE must be a
739 commutative arithmetic operation. Store the constant part into *CONP,
740 the literal in *LITP and return the variable part. If a part isn't
741 present, set it to null. If the tree does not decompose in this way,
742 return the entire tree as the variable part and the other parts as null.
744 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
745 case, we negate an operand that was subtracted. Except if it is a
746 literal for which we use *MINUS_LITP instead.
748 If NEGATE_P is true, we are negating all of IN, again except a literal
749 for which we use *MINUS_LITP instead.
751 If IN is itself a literal or constant, return it as appropriate.
753 Note that we do not guarantee that any of the three values will be the
754 same type as IN, but they will have the same signedness and mode. */
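/* Illustrative example (editor's addition, not from the original sources):
   with CODE == PLUS_EXPR,

     a + 5   -> variable part a, *LITP = 5, *CONP = NULL
     a - 5   -> variable part a, *MINUS_LITP = 5
     a - b   -> returned whole as the variable part, since neither operand
                is a literal or TREE_CONSTANT.  */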
757 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
758 tree *minus_litp, int negate_p)
766 /* Strip any conversions that don't change the machine mode or signedness. */
767 STRIP_SIGN_NOPS (in);
769 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
770 || TREE_CODE (in) == FIXED_CST)
772 else if (TREE_CODE (in) == code
773 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
774 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
775 /* We can associate addition and subtraction together (even
776 though the C standard doesn't say so) for integers because
777 the value is not affected. For reals, the value might be
778 affected, so we can't. */
779 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
780 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
782 tree op0 = TREE_OPERAND (in, 0);
783 tree op1 = TREE_OPERAND (in, 1);
784 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
785 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
787 /* First see if either of the operands is a literal, then a constant. */
788 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
789 || TREE_CODE (op0) == FIXED_CST)
790 *litp = op0, op0 = 0;
791 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
792 || TREE_CODE (op1) == FIXED_CST)
793 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
795 if (op0 != 0 && TREE_CONSTANT (op0))
796 *conp = op0, op0 = 0;
797 else if (op1 != 0 && TREE_CONSTANT (op1))
798 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
800 /* If we haven't dealt with either operand, this is not a case we can
801 decompose. Otherwise, VAR is either of the ones remaining, if any. */
802 if (op0 != 0 && op1 != 0)
807 var = op1, neg_var_p = neg1_p;
809 /* Now do any needed negations. */
811 *minus_litp = *litp, *litp = 0;
813 *conp = negate_expr (*conp);
815 var = negate_expr (var);
817 else if (TREE_CONSTANT (in))
825 *minus_litp = *litp, *litp = 0;
826 else if (*minus_litp)
827 *litp = *minus_litp, *minus_litp = 0;
828 *conp = negate_expr (*conp);
829 var = negate_expr (var);
835 /* Re-associate trees split by the above function. T1 and T2 are
836 either expressions to associate or null. Return the new
837 expression, if any. LOC is the location of the new expression. If
838 we build an operation, do it in TYPE and with CODE. */
841 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
850 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
851 try to fold this since we will have infinite recursion. But do
852 deal with any NEGATE_EXPRs. */
853 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
854 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
856 if (code == PLUS_EXPR)
858 if (TREE_CODE (t1) == NEGATE_EXPR)
859 tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
860 fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
861 else if (TREE_CODE (t2) == NEGATE_EXPR)
862 tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
863 fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
864 else if (integer_zerop (t2))
865 return fold_convert_loc (loc, type, t1);
867 else if (code == MINUS_EXPR)
869 if (integer_zerop (t2))
870 return fold_convert_loc (loc, type, t1);
873 tem = build2 (code, type, fold_convert_loc (loc, type, t1),
874 fold_convert_loc (loc, type, t2));
875 goto associate_trees_exit;
878 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
879 fold_convert_loc (loc, type, t2));
880 associate_trees_exit:
881 protected_set_expr_location (tem, loc);
885 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
886 for use in int_const_binop, size_binop and size_diffop. */
889 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
891 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
893 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
908 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
909 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
910 && TYPE_MODE (type1) == TYPE_MODE (type2);
914 /* Combine two integer constants ARG1 and ARG2 under operation CODE
915 to produce a new constant. Return NULL_TREE if we don't know how
916 to evaluate CODE at compile-time.
918 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
921 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
923 double_int op1, op2, res, tmp;
925 tree type = TREE_TYPE (arg1);
926 bool uns = TYPE_UNSIGNED (type);
928 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
929 bool overflow = false;
931 op1 = tree_to_double_int (arg1);
932 op2 = tree_to_double_int (arg2);
937 res = double_int_ior (op1, op2);
941 res = double_int_xor (op1, op2);
945 res = double_int_and (op1, op2);
949 res = double_int_rshift (op1, double_int_to_shwi (op2),
950 TYPE_PRECISION (type), !uns);
954 /* It's unclear from the C standard whether shifts can overflow.
955 The following code ignores overflow; perhaps a C standard
956 interpretation ruling is needed. */
957 res = double_int_lshift (op1, double_int_to_shwi (op2),
958 TYPE_PRECISION (type), !uns);
962 res = double_int_rrotate (op1, double_int_to_shwi (op2),
963 TYPE_PRECISION (type));
967 res = double_int_lrotate (op1, double_int_to_shwi (op2),
968 TYPE_PRECISION (type));
972 overflow = add_double (op1.low, op1.high, op2.low, op2.high,
973 &res.low, &res.high);
977 neg_double (op2.low, op2.high, &res.low, &res.high);
978 add_double (op1.low, op1.high, res.low, res.high,
979 &res.low, &res.high);
980 overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
984 overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
985 &res.low, &res.high);
989 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
991 /* This is a shortcut for a common special case. */
992 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
993 && !TREE_OVERFLOW (arg1)
994 && !TREE_OVERFLOW (arg2)
995 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
997 if (code == CEIL_DIV_EXPR)
998 op1.low += op2.low - 1;
1000 res.low = op1.low / op2.low, res.high = 0;
1004 /* ... fall through ... */
1006 case ROUND_DIV_EXPR:
1007 if (double_int_zero_p (op2))
1009 if (double_int_one_p (op2))
1014 if (double_int_equal_p (op1, op2)
1015 && ! double_int_zero_p (op1))
1017 res = double_int_one;
1020 overflow = div_and_round_double (code, uns,
1021 op1.low, op1.high, op2.low, op2.high,
1022 &res.low, &res.high,
1023 &tmp.low, &tmp.high);
1026 case TRUNC_MOD_EXPR:
1027 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1028 /* This is a shortcut for a common special case. */
1029 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1030 && !TREE_OVERFLOW (arg1)
1031 && !TREE_OVERFLOW (arg2)
1032 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1034 if (code == CEIL_MOD_EXPR)
1035 op1.low += op2.low - 1;
1036 res.low = op1.low % op2.low, res.high = 0;
1040 /* ... fall through ... */
1042 case ROUND_MOD_EXPR:
1043 if (double_int_zero_p (op2))
1045 overflow = div_and_round_double (code, uns,
1046 op1.low, op1.high, op2.low, op2.high,
1047 &tmp.low, &tmp.high,
1048 &res.low, &res.high);
1052 res = double_int_min (op1, op2, uns);
1056 res = double_int_max (op1, op2, uns);
1065 t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);
1067 /* Propagate overflow flags ourselves. */
1068 if (((!uns || is_sizetype) && overflow)
1069 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1072 TREE_OVERFLOW (t) = 1;
1076 t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
1077 ((!uns || is_sizetype) && overflow)
1078 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1083 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1084 constant. We assume ARG1 and ARG2 have the same data type, or at least
1085 are the same kind of constant and the same machine mode. Return zero if
1086 combining the constants is not allowed in the current operating mode. */
1089 const_binop (enum tree_code code, tree arg1, tree arg2)
1091 /* Sanity check for the recursive cases. */
1098 if (TREE_CODE (arg1) == INTEGER_CST)
1099 return int_const_binop (code, arg1, arg2, 0);
1101 if (TREE_CODE (arg1) == REAL_CST)
1103 enum machine_mode mode;
1106 REAL_VALUE_TYPE value;
1107 REAL_VALUE_TYPE result;
1111 /* The following codes are handled by real_arithmetic. */
1126 d1 = TREE_REAL_CST (arg1);
1127 d2 = TREE_REAL_CST (arg2);
1129 type = TREE_TYPE (arg1);
1130 mode = TYPE_MODE (type);
1132 /* Don't perform operation if we honor signaling NaNs and
1133 either operand is a NaN. */
1134 if (HONOR_SNANS (mode)
1135 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1138 /* Don't perform operation if it would raise a division
1139 by zero exception. */
1140 if (code == RDIV_EXPR
1141 && REAL_VALUES_EQUAL (d2, dconst0)
1142 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1145 /* If either operand is a NaN, just return it. Otherwise, set up
1146 for floating-point trap; we return an overflow. */
1147 if (REAL_VALUE_ISNAN (d1))
1149 else if (REAL_VALUE_ISNAN (d2))
1152 inexact = real_arithmetic (&value, code, &d1, &d2);
1153 real_convert (&result, mode, &value);
1155 /* Don't constant fold this floating point operation if
1156 the result has overflowed and flag_trapping_math is set.  */
1157 if (flag_trapping_math
1158 && MODE_HAS_INFINITIES (mode)
1159 && REAL_VALUE_ISINF (result)
1160 && !REAL_VALUE_ISINF (d1)
1161 && !REAL_VALUE_ISINF (d2))
1164 /* Don't constant fold this floating point operation if the
1165 result may depend upon the run-time rounding mode and
1166 flag_rounding_math is set, or if GCC's software emulation
1167 is unable to accurately represent the result. */
1168 if ((flag_rounding_math
1169 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1170 && (inexact || !real_identical (&result, &value)))
1173 t = build_real (type, result);
1175 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1179 if (TREE_CODE (arg1) == FIXED_CST)
1181 FIXED_VALUE_TYPE f1;
1182 FIXED_VALUE_TYPE f2;
1183 FIXED_VALUE_TYPE result;
1188 /* The following codes are handled by fixed_arithmetic. */
1194 case TRUNC_DIV_EXPR:
1195 f2 = TREE_FIXED_CST (arg2);
1200 f2.data.high = TREE_INT_CST_HIGH (arg2);
1201 f2.data.low = TREE_INT_CST_LOW (arg2);
1209 f1 = TREE_FIXED_CST (arg1);
1210 type = TREE_TYPE (arg1);
1211 sat_p = TYPE_SATURATING (type);
1212 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1213 t = build_fixed (type, result);
1214 /* Propagate overflow flags. */
1215 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1216 TREE_OVERFLOW (t) = 1;
1220 if (TREE_CODE (arg1) == COMPLEX_CST)
1222 tree type = TREE_TYPE (arg1);
1223 tree r1 = TREE_REALPART (arg1);
1224 tree i1 = TREE_IMAGPART (arg1);
1225 tree r2 = TREE_REALPART (arg2);
1226 tree i2 = TREE_IMAGPART (arg2);
1233 real = const_binop (code, r1, r2);
1234 imag = const_binop (code, i1, i2);
1238 if (COMPLEX_FLOAT_TYPE_P (type))
1239 return do_mpc_arg2 (arg1, arg2, type,
1240 /* do_nonfinite= */ folding_initializer,
1243 real = const_binop (MINUS_EXPR,
1244 const_binop (MULT_EXPR, r1, r2),
1245 const_binop (MULT_EXPR, i1, i2));
1246 imag = const_binop (PLUS_EXPR,
1247 const_binop (MULT_EXPR, r1, i2),
1248 const_binop (MULT_EXPR, i1, r2));
1252 if (COMPLEX_FLOAT_TYPE_P (type))
1253 return do_mpc_arg2 (arg1, arg2, type,
1254 /* do_nonfinite= */ folding_initializer,
1257 case TRUNC_DIV_EXPR:
1259 case FLOOR_DIV_EXPR:
1260 case ROUND_DIV_EXPR:
1261 if (flag_complex_method == 0)
1263 /* Keep this algorithm in sync with
1264 tree-complex.c:expand_complex_div_straight().
1266 Expand complex division to scalars, straightforward algorithm.
1267 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t), where t = br*br + bi*bi.  */
1271 = const_binop (PLUS_EXPR,
1272 const_binop (MULT_EXPR, r2, r2),
1273 const_binop (MULT_EXPR, i2, i2));
1275 = const_binop (PLUS_EXPR,
1276 const_binop (MULT_EXPR, r1, r2),
1277 const_binop (MULT_EXPR, i1, i2));
1279 = const_binop (MINUS_EXPR,
1280 const_binop (MULT_EXPR, i1, r2),
1281 const_binop (MULT_EXPR, r1, i2));
1283 real = const_binop (code, t1, magsquared);
1284 imag = const_binop (code, t2, magsquared);
1288 /* Keep this algorithm in sync with
1289 tree-complex.c:expand_complex_div_wide().
1291 Expand complex division to scalars, modified algorithm to minimize
1292 overflow with wide input ranges. */
1293 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1294 fold_abs_const (r2, TREE_TYPE (type)),
1295 fold_abs_const (i2, TREE_TYPE (type)));
1297 if (integer_nonzerop (compare))
1299 /* In the TRUE branch, we compute ratio = br/bi;
1301 div = (br * ratio) + bi;
1302 tr = (ar * ratio) + ai;
1303 ti = (ai * ratio) - ar;
1306 tree ratio = const_binop (code, r2, i2);
1307 tree div = const_binop (PLUS_EXPR, i2,
1308 const_binop (MULT_EXPR, r2, ratio));
1309 real = const_binop (MULT_EXPR, r1, ratio);
1310 real = const_binop (PLUS_EXPR, real, i1);
1311 real = const_binop (code, real, div);
1313 imag = const_binop (MULT_EXPR, i1, ratio);
1314 imag = const_binop (MINUS_EXPR, imag, r1);
1315 imag = const_binop (code, imag, div);
1319 /* In the FALSE branch, we compute ratio = d/c;
1321 divisor = (d * ratio) + c;
1322 tr = (b * ratio) + a;
1323 ti = b - (a * ratio);
1326 tree ratio = const_binop (code, i2, r2);
1327 tree div = const_binop (PLUS_EXPR, r2,
1328 const_binop (MULT_EXPR, i2, ratio));
1330 real = const_binop (MULT_EXPR, i1, ratio);
1331 real = const_binop (PLUS_EXPR, real, r1);
1332 real = const_binop (code, real, div);
1334 imag = const_binop (MULT_EXPR, r1, ratio);
1335 imag = const_binop (MINUS_EXPR, i1, imag);
1336 imag = const_binop (code, imag, div);
1346 return build_complex (type, real, imag);
1349 if (TREE_CODE (arg1) == VECTOR_CST)
1351 tree type = TREE_TYPE (arg1);
1352 int count = TYPE_VECTOR_SUBPARTS (type), i;
1353 tree elements1, elements2, list = NULL_TREE;
1355 if (TREE_CODE (arg2) != VECTOR_CST)
1358 elements1 = TREE_VECTOR_CST_ELTS (arg1);
1359 elements2 = TREE_VECTOR_CST_ELTS (arg2);
1361 for (i = 0; i < count; i++)
1363 tree elem1, elem2, elem;
1365 /* The trailing elements can be empty and should be treated as 0.  */
1367 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1370 elem1 = TREE_VALUE (elements1);
1371 elements1 = TREE_CHAIN (elements1);
1375 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1378 elem2 = TREE_VALUE (elements2);
1379 elements2 = TREE_CHAIN (elements2);
1382 elem = const_binop (code, elem1, elem2);
1384 /* It is possible that const_binop cannot handle the given
1385 code and returns NULL_TREE.  */
1386 if (elem == NULL_TREE)
1389 list = tree_cons (NULL_TREE, elem, list);
1391 return build_vector (type, nreverse (list));
1396 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1397 indicates which particular sizetype to create. */
1400 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1402 return build_int_cst (sizetype_tab[(int) kind], number);
1405 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1406 is a tree code. The type of the result is taken from the operands.
1407 Both must be equivalent integer types, ala int_binop_types_match_p.
1408 If the operands are constant, so is the result. */
1411 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1413 tree type = TREE_TYPE (arg0);
1415 if (arg0 == error_mark_node || arg1 == error_mark_node)
1416 return error_mark_node;
1418 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1421 /* Handle the special case of two integer constants faster. */
1422 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1424 /* And some specific cases even faster than that. */
1425 if (code == PLUS_EXPR)
1427 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1429 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1432 else if (code == MINUS_EXPR)
1434 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1437 else if (code == MULT_EXPR)
1439 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1443 /* Handle general case of two integer constants. */
1444 return int_const_binop (code, arg0, arg1, 0);
1447 return fold_build2_loc (loc, code, type, arg0, arg1);
1450 /* Given two values, either both of sizetype or both of bitsizetype,
1451 compute the difference between the two values. Return the value
1452 in signed type corresponding to the type of the operands. */
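/* Illustrative example (editor's addition, not from the original sources):
   for sizetype operands the difference is returned in the corresponding
   signed type, so conceptually

     size_diffop (size_int (4), size_int (12))

   yields -8 as an ssizetype constant rather than a huge unsigned value.  */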
1455 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1457 tree type = TREE_TYPE (arg0);
1460 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1463 /* If the type is already signed, just do the simple thing. */
1464 if (!TYPE_UNSIGNED (type))
1465 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1467 if (type == sizetype)
1469 else if (type == bitsizetype)
1470 ctype = sbitsizetype;
1472 ctype = signed_type_for (type);
1474 /* If either operand is not a constant, do the conversions to the signed
1475 type and subtract. The hardware will do the right thing with any
1476 overflow in the subtraction. */
1477 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1478 return size_binop_loc (loc, MINUS_EXPR,
1479 fold_convert_loc (loc, ctype, arg0),
1480 fold_convert_loc (loc, ctype, arg1));
1482 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1483 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1484 overflow) and negate (which can't either). Special-case a result
1485 of zero while we're here. */
1486 if (tree_int_cst_equal (arg0, arg1))
1487 return build_int_cst (ctype, 0);
1488 else if (tree_int_cst_lt (arg1, arg0))
1489 return fold_convert_loc (loc, ctype,
1490 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1492 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1493 fold_convert_loc (loc, ctype,
1494 size_binop_loc (loc,
1499 /* A subroutine of fold_convert_const handling conversions of an
1500 INTEGER_CST to another integer type. */
1503 fold_convert_const_int_from_int (tree type, const_tree arg1)
1507 /* Given an integer constant, make new constant with new type,
1508 appropriately sign-extended or truncated. */
1509 t = force_fit_type_double (type, tree_to_double_int (arg1),
1510 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1511 (TREE_INT_CST_HIGH (arg1) < 0
1512 && (TYPE_UNSIGNED (type)
1513 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1514 | TREE_OVERFLOW (arg1));
1519 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1520 to an integer type. */
1523 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1528 /* The following code implements the floating point to integer
1529 conversion rules required by the Java Language Specification,
1530 that IEEE NaNs are mapped to zero and values that overflow
1531 the target precision saturate, i.e. values greater than
1532 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1533 are mapped to INT_MIN. These semantics are allowed by the
1534 C and C++ standards that simply state that the behavior of
1535 FP-to-integer conversion is unspecified upon overflow. */
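/* Illustrative example (editor's addition, not from the original sources):
   under these rules a conversion of a NaN to a 32-bit int folds to 0,
   1e30 saturates to INT_MAX (2147483647) and -1e30 saturates to INT_MIN,
   with TREE_OVERFLOW set on the result in each case.  */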
1539 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1543 case FIX_TRUNC_EXPR:
1544 real_trunc (&r, VOIDmode, &x);
1551 /* If R is NaN, return zero and show we have an overflow. */
1552 if (REAL_VALUE_ISNAN (r))
1555 val = double_int_zero;
1558 /* See if R is less than the lower bound or greater than the upper bound.  */
1563 tree lt = TYPE_MIN_VALUE (type);
1564 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1565 if (REAL_VALUES_LESS (r, l))
1568 val = tree_to_double_int (lt);
1574 tree ut = TYPE_MAX_VALUE (type);
1577 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1578 if (REAL_VALUES_LESS (u, r))
1581 val = tree_to_double_int (ut);
1587 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1589 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1593 /* A subroutine of fold_convert_const handling conversions of a
1594 FIXED_CST to an integer type. */
1597 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1600 double_int temp, temp_trunc;
1603 /* Right shift FIXED_CST to temp by fbit. */
1604 temp = TREE_FIXED_CST (arg1).data;
1605 mode = TREE_FIXED_CST (arg1).mode;
1606 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
1608 temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
1609 HOST_BITS_PER_DOUBLE_INT,
1610 SIGNED_FIXED_POINT_MODE_P (mode));
1612 /* Left shift temp to temp_trunc by fbit. */
1613 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
1614 HOST_BITS_PER_DOUBLE_INT,
1615 SIGNED_FIXED_POINT_MODE_P (mode));
1619 temp = double_int_zero;
1620 temp_trunc = double_int_zero;
1623 /* If FIXED_CST is negative, we need to round the value toward 0.
1624 Do this by adding 1 to temp when the fractional bits are not all zero.  */
1625 if (SIGNED_FIXED_POINT_MODE_P (mode)
1626 && double_int_negative_p (temp_trunc)
1627 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
1628 temp = double_int_add (temp, double_int_one);
1630 /* Given a fixed-point constant, make new constant with new type,
1631 appropriately sign-extended or truncated. */
1632 t = force_fit_type_double (type, temp, -1,
1633 (double_int_negative_p (temp)
1634 && (TYPE_UNSIGNED (type)
1635 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1636 | TREE_OVERFLOW (arg1));
1641 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1642 to another floating point type. */
1645 fold_convert_const_real_from_real (tree type, const_tree arg1)
1647 REAL_VALUE_TYPE value;
1650 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1651 t = build_real (type, value);
1653 /* If converting an infinity or NAN to a representation that doesn't
1654 have one, set the overflow bit so that we can produce some kind of
1655 error message at the appropriate point if necessary. It's not the
1656 most user-friendly message, but it's better than nothing. */
1657 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1658 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1659 TREE_OVERFLOW (t) = 1;
1660 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1661 && !MODE_HAS_NANS (TYPE_MODE (type)))
1662 TREE_OVERFLOW (t) = 1;
1663 /* Regular overflow, conversion produced an infinity in a mode that
1664 can't represent them. */
1665 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1666 && REAL_VALUE_ISINF (value)
1667 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1668 TREE_OVERFLOW (t) = 1;
1670 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1674 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1675 to a floating point type. */
1678 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1680 REAL_VALUE_TYPE value;
1683 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1684 t = build_real (type, value);
1686 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1690 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1691 to another fixed-point type. */
1694 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1696 FIXED_VALUE_TYPE value;
1700 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1701 TYPE_SATURATING (type));
1702 t = build_fixed (type, value);
1704 /* Propagate overflow flags. */
1705 if (overflow_p | TREE_OVERFLOW (arg1))
1706 TREE_OVERFLOW (t) = 1;
1710 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1711 to a fixed-point type. */
1714 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1716 FIXED_VALUE_TYPE value;
1720 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1721 TREE_INT_CST (arg1),
1722 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1723 TYPE_SATURATING (type));
1724 t = build_fixed (type, value);
1726 /* Propagate overflow flags. */
1727 if (overflow_p | TREE_OVERFLOW (arg1))
1728 TREE_OVERFLOW (t) = 1;
1732 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1733 to a fixed-point type. */
1736 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1738 FIXED_VALUE_TYPE value;
1742 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1743 &TREE_REAL_CST (arg1),
1744 TYPE_SATURATING (type));
1745 t = build_fixed (type, value);
1747 /* Propagate overflow flags. */
1748 if (overflow_p | TREE_OVERFLOW (arg1))
1749 TREE_OVERFLOW (t) = 1;
1753 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1754 type TYPE. If no simplification can be done return NULL_TREE. */
1757 fold_convert_const (enum tree_code code, tree type, tree arg1)
1759 if (TREE_TYPE (arg1) == type)
1762 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1763 || TREE_CODE (type) == OFFSET_TYPE)
1765 if (TREE_CODE (arg1) == INTEGER_CST)
1766 return fold_convert_const_int_from_int (type, arg1);
1767 else if (TREE_CODE (arg1) == REAL_CST)
1768 return fold_convert_const_int_from_real (code, type, arg1);
1769 else if (TREE_CODE (arg1) == FIXED_CST)
1770 return fold_convert_const_int_from_fixed (type, arg1);
1772 else if (TREE_CODE (type) == REAL_TYPE)
1774 if (TREE_CODE (arg1) == INTEGER_CST)
1775 return build_real_from_int_cst (type, arg1);
1776 else if (TREE_CODE (arg1) == REAL_CST)
1777 return fold_convert_const_real_from_real (type, arg1);
1778 else if (TREE_CODE (arg1) == FIXED_CST)
1779 return fold_convert_const_real_from_fixed (type, arg1);
1781 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1783 if (TREE_CODE (arg1) == FIXED_CST)
1784 return fold_convert_const_fixed_from_fixed (type, arg1);
1785 else if (TREE_CODE (arg1) == INTEGER_CST)
1786 return fold_convert_const_fixed_from_int (type, arg1);
1787 else if (TREE_CODE (arg1) == REAL_CST)
1788 return fold_convert_const_fixed_from_real (type, arg1);
1793 /* Construct a vector of zero elements of vector type TYPE. */
1796 build_zero_vector (tree type)
1801 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1802 units = TYPE_VECTOR_SUBPARTS (type);
1805 for (i = 0; i < units; i++)
1806 list = tree_cons (NULL_TREE, elem, list);
1807 return build_vector (type, list);
1810 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
1813 fold_convertible_p (const_tree type, const_tree arg)
1815 tree orig = TREE_TYPE (arg);
1820 if (TREE_CODE (arg) == ERROR_MARK
1821 || TREE_CODE (type) == ERROR_MARK
1822 || TREE_CODE (orig) == ERROR_MARK)
1825 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1828 switch (TREE_CODE (type))
1830 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1831 case POINTER_TYPE: case REFERENCE_TYPE:
1833 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1834 || TREE_CODE (orig) == OFFSET_TYPE)
1836 return (TREE_CODE (orig) == VECTOR_TYPE
1837 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1840 case FIXED_POINT_TYPE:
1844 return TREE_CODE (type) == TREE_CODE (orig);
1851 /* Convert expression ARG to type TYPE. Used by the middle-end for
1852 simple conversions in preference to calling the front-end's convert. */
1855 fold_convert_loc (location_t loc, tree type, tree arg)
1857 tree orig = TREE_TYPE (arg);
1863 if (TREE_CODE (arg) == ERROR_MARK
1864 || TREE_CODE (type) == ERROR_MARK
1865 || TREE_CODE (orig) == ERROR_MARK)
1866 return error_mark_node;
1868 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1869 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1871 switch (TREE_CODE (type))
1874 case REFERENCE_TYPE:
1875 /* Handle conversions between pointers to different address spaces. */
1876 if (POINTER_TYPE_P (orig)
1877 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1878 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1879 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1882 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1884 if (TREE_CODE (arg) == INTEGER_CST)
1886 tem = fold_convert_const (NOP_EXPR, type, arg);
1887 if (tem != NULL_TREE)
1890 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1891 || TREE_CODE (orig) == OFFSET_TYPE)
1892 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1893 if (TREE_CODE (orig) == COMPLEX_TYPE)
1894 return fold_convert_loc (loc, type,
1895 fold_build1_loc (loc, REALPART_EXPR,
1896 TREE_TYPE (orig), arg));
1897 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1898 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1899 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1902 if (TREE_CODE (arg) == INTEGER_CST)
1904 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1905 if (tem != NULL_TREE)
1908 else if (TREE_CODE (arg) == REAL_CST)
1910 tem = fold_convert_const (NOP_EXPR, type, arg);
1911 if (tem != NULL_TREE)
1914 else if (TREE_CODE (arg) == FIXED_CST)
1916 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1917 if (tem != NULL_TREE)
1921 switch (TREE_CODE (orig))
1924 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1925 case POINTER_TYPE: case REFERENCE_TYPE:
1926 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1929 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1931 case FIXED_POINT_TYPE:
1932 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1935 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1936 return fold_convert_loc (loc, type, tem);
1942 case FIXED_POINT_TYPE:
1943 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1944 || TREE_CODE (arg) == REAL_CST)
1946 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1947 if (tem != NULL_TREE)
1948 goto fold_convert_exit;
1951 switch (TREE_CODE (orig))
1953 case FIXED_POINT_TYPE:
1958 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1961 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1962 return fold_convert_loc (loc, type, tem);
1969 switch (TREE_CODE (orig))
1972 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1973 case POINTER_TYPE: case REFERENCE_TYPE:
1975 case FIXED_POINT_TYPE:
1976 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1977 fold_convert_loc (loc, TREE_TYPE (type), arg),
1978 fold_convert_loc (loc, TREE_TYPE (type),
1979 integer_zero_node));
1984 if (TREE_CODE (arg) == COMPLEX_EXPR)
1986 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1987 TREE_OPERAND (arg, 0));
1988 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1989 TREE_OPERAND (arg, 1));
1990 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1993 arg = save_expr (arg);
1994 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1995 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1996 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1997 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1998 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2006 if (integer_zerop (arg))
2007 return build_zero_vector (type);
2008 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2009 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2010 || TREE_CODE (orig) == VECTOR_TYPE);
2011 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2014 tem = fold_ignored_result (arg);
2015 if (TREE_CODE (tem) == MODIFY_EXPR)
2016 goto fold_convert_exit;
2017 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2023 protected_set_expr_location (tem, loc);
2027 /* Return false if expr can be assumed not to be an lvalue, true otherwise.  */
2031 maybe_lvalue_p (const_tree x)
2033 /* We only need to wrap lvalue tree codes. */
2034 switch (TREE_CODE (x))
2045 case ALIGN_INDIRECT_REF:
2046 case MISALIGNED_INDIRECT_REF:
2048 case ARRAY_RANGE_REF:
2054 case PREINCREMENT_EXPR:
2055 case PREDECREMENT_EXPR:
2057 case TRY_CATCH_EXPR:
2058 case WITH_CLEANUP_EXPR:
2067 /* Assume the worst for front-end tree codes. */
2068 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2076 /* Return an expr equal to X but certainly not valid as an lvalue. */
2079 non_lvalue_loc (location_t loc, tree x)
2081 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to us.  */
2086 if (! maybe_lvalue_p (x))
2088 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2089 SET_EXPR_LOCATION (x, loc);
2093 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2094 Zero means allow extended lvalues. */
2096 int pedantic_lvalues;
2098 /* When pedantic, return an expr equal to X but certainly not valid as a
2099 pedantic lvalue. Otherwise, return X. */
2102 pedantic_non_lvalue_loc (location_t loc, tree x)
2104 if (pedantic_lvalues)
2105 return non_lvalue_loc (loc, x);
2106 protected_set_expr_location (x, loc);
2110 /* Given a tree comparison code, return the code that is the logical inverse
2111 of the given code. It is not safe to do this for floating-point
2112 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2113 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
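/* Illustrative example (editor's addition, not from the original sources):
   inverting LT_EXPR yields GE_EXPR when NaNs cannot occur, UNGE_EXPR when
   they can, and ERROR_MARK when they can and -ftrapping-math is in effect,
   because the inverted comparison would not raise the same exceptions.  */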
2116 invert_tree_comparison (enum tree_code code, bool honor_nans)
2118 if (honor_nans && flag_trapping_math)
2128 return honor_nans ? UNLE_EXPR : LE_EXPR;
2130 return honor_nans ? UNLT_EXPR : LT_EXPR;
2132 return honor_nans ? UNGE_EXPR : GE_EXPR;
2134 return honor_nans ? UNGT_EXPR : GT_EXPR;
2148 return UNORDERED_EXPR;
2149 case UNORDERED_EXPR:
2150 return ORDERED_EXPR;
2156 /* Similar, but return the comparison that results if the operands are
2157 swapped. This is safe for floating-point. */
2160 swap_tree_comparison (enum tree_code code)
2167 case UNORDERED_EXPR:
2193 /* Convert a comparison tree code from an enum tree_code representation
2194 into a compcode bit-based encoding. This function is the inverse of
2195 compcode_to_comparison. */
2197 static enum comparison_code
2198 comparison_to_compcode (enum tree_code code)
2215 return COMPCODE_ORD;
2216 case UNORDERED_EXPR:
2217 return COMPCODE_UNORD;
2219 return COMPCODE_UNLT;
2221 return COMPCODE_UNEQ;
2223 return COMPCODE_UNLE;
2225 return COMPCODE_UNGT;
2227 return COMPCODE_LTGT;
2229 return COMPCODE_UNGE;
2235 /* Convert a compcode bit-based encoding of a comparison operator back
2236 to GCC's enum tree_code representation. This function is the
2237 inverse of comparison_to_compcode. */
2239 static enum tree_code
2240 compcode_to_comparison (enum comparison_code code)
2257 return ORDERED_EXPR;
2258 case COMPCODE_UNORD:
2259 return UNORDERED_EXPR;
2277 /* Return a tree for the comparison which is the combination of
2278 doing the AND or OR (depending on CODE) of the two operations LCODE
2279 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2280 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2281 if this makes the transformation invalid. */
2284 combine_comparisons (location_t loc,
2285 enum tree_code code, enum tree_code lcode,
2286 enum tree_code rcode, tree truth_type,
2287 tree ll_arg, tree lr_arg)
2289 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2290 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2291 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2296 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2297 compcode = lcompcode & rcompcode;
2300 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2301 compcode = lcompcode | rcompcode;
2310 /* Eliminate unordered comparisons, as well as LTGT and ORD
2311 which are not used unless the mode has NaNs. */
2312 compcode &= ~COMPCODE_UNORD;
2313 if (compcode == COMPCODE_LTGT)
2314 compcode = COMPCODE_NE;
2315 else if (compcode == COMPCODE_ORD)
2316 compcode = COMPCODE_TRUE;
2318 else if (flag_trapping_math)
2320 /* Check that the original operation and the optimized ones will trap
2321 under the same condition. */
2322 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2323 && (lcompcode != COMPCODE_EQ)
2324 && (lcompcode != COMPCODE_ORD);
2325 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2326 && (rcompcode != COMPCODE_EQ)
2327 && (rcompcode != COMPCODE_ORD);
2328 bool trap = (compcode & COMPCODE_UNORD) == 0
2329 && (compcode != COMPCODE_EQ)
2330 && (compcode != COMPCODE_ORD);
2332 /* In a short-circuited boolean expression the LHS might be
2333 such that the RHS, if evaluated, will never trap. For
2334 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2335 if neither x nor y is NaN. (This is a mixed blessing: for
2336 example, the expression above will never trap, hence
2337 optimizing it to x < y would be invalid). */
2338 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2339 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2342 /* If the comparison was short-circuited, and only the RHS
2343 trapped, we may now generate a spurious trap. */
2345 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2348 /* If we changed the conditions that cause a trap, we lose. */
2349 if ((ltrap || rtrap) != trap)
2353 if (compcode == COMPCODE_TRUE)
2354 return constant_boolean_node (true, truth_type);
2355 else if (compcode == COMPCODE_FALSE)
2356 return constant_boolean_node (false, truth_type);
2359 enum tree_code tcode;
2361 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2362 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
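/* Illustrative sketch (not part of the original sources): with the bit
   encoding above, OR-ing the codes for "<" and "==" gives the code for
   "<=", so a hypothetical caller

     tree t = combine_comparisons (UNKNOWN_LOCATION, TRUTH_ORIF_EXPR,
                                   LT_EXPR, EQ_EXPR, boolean_type_node,
                                   x, y);

   gets back the tree for "x <= y", assuming x and y are the shared
   operands.  If honoring NaNs or trapping math would change which
   operands can trap, the function returns NULL_TREE instead and the
   caller must keep the two original comparisons.  */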
2366 /* Return nonzero if two operands (typically of the same tree node)
2367 are necessarily equal. If either argument has side-effects this
2368 function returns zero. FLAGS modifies behavior as follows:
2370 If OEP_ONLY_CONST is set, only return nonzero for constants.
2371 This function tests whether the operands are indistinguishable;
2372 it does not test whether they are equal using C's == operation.
2373 The distinction is important for IEEE floating point, because
2374 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2375 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2377 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2378 even though it may hold multiple values during a function.
2379 This is because a GCC tree node guarantees that nothing else is
2380 executed between the evaluation of its "operands" (which may often
2381 be evaluated in arbitrary order). Hence if the operands themselves
2382 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2383 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2384 unset means assuming isochronic (or instantaneous) tree equivalence.
2385 Unless comparing arbitrary expression trees, such as from different
2386 statements, this flag can usually be left unset.
2388 If OEP_PURE_SAME is set, then pure functions with identical arguments
2389 are considered the same. It is used when the caller has other ways
2390 to ensure that global memory is unchanged in between. */
2393 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2395 /* If either is ERROR_MARK, they aren't equal. */
2396 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2397 || TREE_TYPE (arg0) == error_mark_node
2398 || TREE_TYPE (arg1) == error_mark_node)
2401 /* Similar, if either does not have a type (like a released SSA name),
2402 they aren't equal. */
2403 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2406 /* Check equality of integer constants before bailing out due to
2407 precision differences. */
2408 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2409 return tree_int_cst_equal (arg0, arg1);
2411 /* If both types don't have the same signedness, then we can't consider
2412 them equal. We must check this before the STRIP_NOPS calls
2413 because they may change the signedness of the arguments. As pointers
2414 strictly don't have a signedness, require either two pointers or
2415 two non-pointers as well. */
2416 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2417 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2420 /* We cannot consider pointers to different address spaces equal. */
2421 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2422 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2423 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2426 /* If both types don't have the same precision, then it is not safe
2428 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2434 /* In case both args are comparisons but with different comparison
2435 code, try to swap the comparison operands of one arg to produce
2436 a match and compare that variant. */
2437 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2438 && COMPARISON_CLASS_P (arg0)
2439 && COMPARISON_CLASS_P (arg1))
2441 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2443 if (TREE_CODE (arg0) == swap_code)
2444 return operand_equal_p (TREE_OPERAND (arg0, 0),
2445 TREE_OPERAND (arg1, 1), flags)
2446 && operand_equal_p (TREE_OPERAND (arg0, 1),
2447 TREE_OPERAND (arg1, 0), flags);
2450 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2451 /* This is needed for conversions and for COMPONENT_REF.
2452 Might as well play it safe and always test this. */
2453 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2454 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2455 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2458 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2459 We don't care about side effects in that case because the SAVE_EXPR
2460 takes care of that for us. In all other cases, two expressions are
2461 equal if they have no side effects. If we have two identical
2462 expressions with side effects that should be treated the same due
2463 to the only side effects being identical SAVE_EXPR's, that will
2464 be detected in the recursive calls below. */
2465 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2466 && (TREE_CODE (arg0) == SAVE_EXPR
2467 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2470 /* Next handle constant cases, those for which we can return 1 even
2471 if ONLY_CONST is set. */
2472 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2473 switch (TREE_CODE (arg0))
2476 return tree_int_cst_equal (arg0, arg1);
2479 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2480 TREE_FIXED_CST (arg1));
2483 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2484 TREE_REAL_CST (arg1)))
2488 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2490 /* If we do not distinguish between signed and unsigned zero,
2491 consider them equal. */
2492 if (real_zerop (arg0) && real_zerop (arg1))
2501 v1 = TREE_VECTOR_CST_ELTS (arg0);
2502 v2 = TREE_VECTOR_CST_ELTS (arg1);
2505 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2508 v1 = TREE_CHAIN (v1);
2509 v2 = TREE_CHAIN (v2);
2516 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2518 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2522 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2523 && ! memcmp (TREE_STRING_POINTER (arg0),
2524 TREE_STRING_POINTER (arg1),
2525 TREE_STRING_LENGTH (arg0)));
2528 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2534 if (flags & OEP_ONLY_CONST)
2537 /* Define macros to test an operand from arg0 and arg1 for equality and a
2538 variant that allows null and views null as being different from any
2539 non-null value. In the latter case, if either is null, they both
2540 must be; otherwise, do the normal comparison. */
2541 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2542 TREE_OPERAND (arg1, N), flags)
2544 #define OP_SAME_WITH_NULL(N) \
2545 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2546 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2548 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2551 /* Two conversions are equal only if signedness and modes match. */
2552 switch (TREE_CODE (arg0))
2555 case FIX_TRUNC_EXPR:
2556 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2557 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2567 case tcc_comparison:
2569 if (OP_SAME (0) && OP_SAME (1))
2572 /* For commutative ops, allow the other order. */
2573 return (commutative_tree_code (TREE_CODE (arg0))
2574 && operand_equal_p (TREE_OPERAND (arg0, 0),
2575 TREE_OPERAND (arg1, 1), flags)
2576 && operand_equal_p (TREE_OPERAND (arg0, 1),
2577 TREE_OPERAND (arg1, 0), flags));
2580 /* If either of the pointer (or reference) expressions we are
2581 dereferencing contain a side effect, these cannot be equal. */
2582 if (TREE_SIDE_EFFECTS (arg0)
2583 || TREE_SIDE_EFFECTS (arg1))
2586 switch (TREE_CODE (arg0))
2589 case ALIGN_INDIRECT_REF:
2590 case MISALIGNED_INDIRECT_REF:
2596 /* Require equal access sizes. We can have incomplete types
2597 for array references of variable-sized arrays from the
2598 Fortran frontend though. */
2599 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2600 || (TYPE_SIZE (TREE_TYPE (arg0))
2601 && TYPE_SIZE (TREE_TYPE (arg1))
2602 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2603 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2604 && OP_SAME (0) && OP_SAME (1));
2607 case ARRAY_RANGE_REF:
2608 /* Operands 2 and 3 may be null.
2609 Compare the array index by value if it is constant first as we
2610 may have different types but same value here. */
2612 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2613 TREE_OPERAND (arg1, 1))
2615 && OP_SAME_WITH_NULL (2)
2616 && OP_SAME_WITH_NULL (3));
2619 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2620 may be NULL when we're called to compare MEM_EXPRs. */
2621 return OP_SAME_WITH_NULL (0)
2623 && OP_SAME_WITH_NULL (2);
2626 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2632 case tcc_expression:
2633 switch (TREE_CODE (arg0))
2636 case TRUTH_NOT_EXPR:
2639 case TRUTH_ANDIF_EXPR:
2640 case TRUTH_ORIF_EXPR:
2641 return OP_SAME (0) && OP_SAME (1);
2643 case TRUTH_AND_EXPR:
2645 case TRUTH_XOR_EXPR:
2646 if (OP_SAME (0) && OP_SAME (1))
2649 /* Otherwise take into account this is a commutative operation. */
2650 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2651 TREE_OPERAND (arg1, 1), flags)
2652 && operand_equal_p (TREE_OPERAND (arg0, 1),
2653 TREE_OPERAND (arg1, 0), flags));
2656 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2663 switch (TREE_CODE (arg0))
2666 /* If the CALL_EXPRs call different functions, then they
2667 clearly can not be equal. */
2668 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2673 unsigned int cef = call_expr_flags (arg0);
2674 if (flags & OEP_PURE_SAME)
2675 cef &= ECF_CONST | ECF_PURE;
2682 /* Now see if all the arguments are the same. */
2684 const_call_expr_arg_iterator iter0, iter1;
2686 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2687 a1 = first_const_call_expr_arg (arg1, &iter1);
2689 a0 = next_const_call_expr_arg (&iter0),
2690 a1 = next_const_call_expr_arg (&iter1))
2691 if (! operand_equal_p (a0, a1, flags))
2694 /* If we get here and both argument lists are exhausted
2695 then the CALL_EXPRs are equal. */
2696 return ! (a0 || a1);
2702 case tcc_declaration:
2703 /* Consider __builtin_sqrt equal to sqrt. */
2704 return (TREE_CODE (arg0) == FUNCTION_DECL
2705 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2706 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2707 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2714 #undef OP_SAME_WITH_NULL
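/* A small usage sketch (illustrative only): FLAGS selects what "equal"
   means.  For some VAR_DECL x,

     operand_equal_p (x, x, 0)               -> nonzero
     operand_equal_p (x, x, OEP_ONLY_CONST)  -> zero, x is not a constant

   Likewise two REAL_CST nodes for 0.0 and -0.0 compare unequal when the
   mode honors signed zeros, since the function tests
   indistinguishability rather than C's "==".  */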
2717 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2718 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2720 When in doubt, return 0. */
2723 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2725 int unsignedp1, unsignedpo;
2726 tree primarg0, primarg1, primother;
2727 unsigned int correct_width;
2729 if (operand_equal_p (arg0, arg1, 0))
2732 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2733 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2736 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2737 and see if the inner values are the same. This removes any
2738 signedness comparison, which doesn't matter here. */
2739 primarg0 = arg0, primarg1 = arg1;
2740 STRIP_NOPS (primarg0);
2741 STRIP_NOPS (primarg1);
2742 if (operand_equal_p (primarg0, primarg1, 0))
2745 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2746 actual comparison operand, ARG0.
2748 First throw away any conversions to wider types
2749 already present in the operands. */
2751 primarg1 = get_narrower (arg1, &unsignedp1);
2752 primother = get_narrower (other, &unsignedpo);
2754 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2755 if (unsignedp1 == unsignedpo
2756 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2757 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2759 tree type = TREE_TYPE (arg0);
2761 /* Make sure shorter operand is extended the right way
2762 to match the longer operand. */
2763 primarg1 = fold_convert (signed_or_unsigned_type_for
2764 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2766 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2773 /* See if ARG is an expression that is either a comparison or is performing
2774 arithmetic on comparisons. The comparisons must only be comparing
2775 two different values, which will be stored in *CVAL1 and *CVAL2; if
2776 they are nonzero it means that some operands have already been found.
2777 No variables may be used anywhere else in the expression except in the
2778 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2779 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2781 If this is true, return 1. Otherwise, return zero. */
2784 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2786 enum tree_code code = TREE_CODE (arg);
2787 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2789 /* We can handle some of the tcc_expression cases here. */
2790 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2792 else if (tclass == tcc_expression
2793 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2794 || code == COMPOUND_EXPR))
2795 tclass = tcc_binary;
2797 else if (tclass == tcc_expression && code == SAVE_EXPR
2798 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2800 /* If we've already found a CVAL1 or CVAL2, this expression is
2801 too complex to handle. */
2802 if (*cval1 || *cval2)
2812 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2815 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2816 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2817 cval1, cval2, save_p));
2822 case tcc_expression:
2823 if (code == COND_EXPR)
2824 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2825 cval1, cval2, save_p)
2826 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2827 cval1, cval2, save_p)
2828 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2829 cval1, cval2, save_p));
2832 case tcc_comparison:
2833 /* First see if we can handle the first operand, then the second. For
2834 the second operand, we know *CVAL1 can't be zero. It must be that
2835 one side of the comparison is each of the values; test for the
2836 case where this isn't true by failing if the two operands
2839 if (operand_equal_p (TREE_OPERAND (arg, 0),
2840 TREE_OPERAND (arg, 1), 0))
2844 *cval1 = TREE_OPERAND (arg, 0);
2845 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2847 else if (*cval2 == 0)
2848 *cval2 = TREE_OPERAND (arg, 0);
2849 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2854 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2856 else if (*cval2 == 0)
2857 *cval2 = TREE_OPERAND (arg, 1);
2858 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2870 /* ARG is a tree that is known to contain just arithmetic operations and
2871 comparisons. Evaluate the operations in the tree substituting NEW0 for
2872 any occurrence of OLD0 as an operand of a comparison and likewise for
2876 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2877 tree old1, tree new1)
2879 tree type = TREE_TYPE (arg);
2880 enum tree_code code = TREE_CODE (arg);
2881 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2883 /* We can handle some of the tcc_expression cases here. */
2884 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2886 else if (tclass == tcc_expression
2887 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2888 tclass = tcc_binary;
2893 return fold_build1_loc (loc, code, type,
2894 eval_subst (loc, TREE_OPERAND (arg, 0),
2895 old0, new0, old1, new1));
2898 return fold_build2_loc (loc, code, type,
2899 eval_subst (loc, TREE_OPERAND (arg, 0),
2900 old0, new0, old1, new1),
2901 eval_subst (loc, TREE_OPERAND (arg, 1),
2902 old0, new0, old1, new1));
2904 case tcc_expression:
2908 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2912 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2916 return fold_build3_loc (loc, code, type,
2917 eval_subst (loc, TREE_OPERAND (arg, 0),
2918 old0, new0, old1, new1),
2919 eval_subst (loc, TREE_OPERAND (arg, 1),
2920 old0, new0, old1, new1),
2921 eval_subst (loc, TREE_OPERAND (arg, 2),
2922 old0, new0, old1, new1));
2926 /* Fall through - ??? */
2928 case tcc_comparison:
2930 tree arg0 = TREE_OPERAND (arg, 0);
2931 tree arg1 = TREE_OPERAND (arg, 1);
2933 /* We need to check both for exact equality and tree equality. The
2934 former will be true if the operand has a side-effect. In that
2935 case, we know the operand occurred exactly once. */
2937 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2939 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2942 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2944 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2947 return fold_build2_loc (loc, code, type, arg0, arg1);
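/* Worked example (illustrative): with ARG = "(a < b) && (c == d)",
   OLD0 = a, NEW0 = x, OLD1 = c and NEW1 = y, eval_subst rebuilds the
   tree as "(x < b) && (y == d)"; the substitution happens only on
   operands of the comparisons, and each rebuilt node is folded on the
   way back up.  */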
2955 /* Return a tree for the case when the result of an expression is RESULT
2956 converted to TYPE and OMITTED was previously an operand of the expression
2957 but is now not needed (e.g., we folded OMITTED * 0).
2959 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2960 the conversion of RESULT to TYPE. */
2963 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2965 tree t = fold_convert_loc (loc, type, result);
2967 /* If the resulting operand is an empty statement, just return the omitted
2968 statement cast to void. */
2969 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2971 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
2972 goto omit_one_operand_exit;
2975 if (TREE_SIDE_EFFECTS (omitted))
2977 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2978 goto omit_one_operand_exit;
2981 return non_lvalue_loc (loc, t);
2983 omit_one_operand_exit:
2984 protected_set_expr_location (t, loc);
2988 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2991 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2994 tree t = fold_convert_loc (loc, type, result);
2996 /* If the resulting operand is an empty statement, just return the omitted
2997 statement cast to void. */
2998 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3000 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3001 goto pedantic_omit_one_operand_exit;
3004 if (TREE_SIDE_EFFECTS (omitted))
3006 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3007 goto pedantic_omit_one_operand_exit;
3010 return pedantic_non_lvalue_loc (loc, t);
3012 pedantic_omit_one_operand_exit:
3013 protected_set_expr_location (t, loc);
3017 /* Return a tree for the case when the result of an expression is RESULT
3018 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3019 of the expression but are now not needed.
3021 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3022 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3023 evaluated before OMITTED2. Otherwise, if neither has side effects,
3024 just do the conversion of RESULT to TYPE. */
3027 omit_two_operands_loc (location_t loc, tree type, tree result,
3028 tree omitted1, tree omitted2)
3030 tree t = fold_convert_loc (loc, type, result);
3032 if (TREE_SIDE_EFFECTS (omitted2))
3034 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3035 SET_EXPR_LOCATION (t, loc);
3037 if (TREE_SIDE_EFFECTS (omitted1))
3039 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3040 SET_EXPR_LOCATION (t, loc);
3043 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
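/* Illustrative sketch: this is what lets a fold such as
   "f () * 0 * g () -> 0" keep the required side effects.  A
   hypothetical caller could write

     omit_two_operands_loc (loc, type, build_int_cst (type, 0),
                            call_to_f, call_to_g);

   and get back the COMPOUND_EXPR chain "f (), g (), 0", so both calls
   are still evaluated (f first, as documented above) even though their
   values are discarded.  */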
3047 /* Return a simplified tree node for the truth-negation of ARG. This
3048 never alters ARG itself. We assume that ARG is an operation that
3049 returns a truth value (0 or 1).
3051 FIXME: one would think we would fold the result, but it causes
3052 problems with the dominator optimizer. */
3055 fold_truth_not_expr (location_t loc, tree arg)
3057 tree t, type = TREE_TYPE (arg);
3058 enum tree_code code = TREE_CODE (arg);
3059 location_t loc1, loc2;
3061 /* If this is a comparison, we can simply invert it, except for
3062 floating-point non-equality comparisons, in which case we just
3063 enclose a TRUTH_NOT_EXPR around what we have. */
3065 if (TREE_CODE_CLASS (code) == tcc_comparison)
3067 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3068 if (FLOAT_TYPE_P (op_type)
3069 && flag_trapping_math
3070 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3071 && code != NE_EXPR && code != EQ_EXPR)
3074 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3075 if (code == ERROR_MARK)
3078 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3079 SET_EXPR_LOCATION (t, loc);
3086 return constant_boolean_node (integer_zerop (arg), type);
3088 case TRUTH_AND_EXPR:
3089 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3090 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3091 if (loc1 == UNKNOWN_LOCATION)
3093 if (loc2 == UNKNOWN_LOCATION)
3095 t = build2 (TRUTH_OR_EXPR, type,
3096 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3097 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3101 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3102 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3103 if (loc1 == UNKNOWN_LOCATION)
3105 if (loc2 == UNKNOWN_LOCATION)
3107 t = build2 (TRUTH_AND_EXPR, type,
3108 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3109 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3112 case TRUTH_XOR_EXPR:
3113 /* Here we can invert either operand. We invert the first operand
3114 unless the second operand is a TRUTH_NOT_EXPR in which case our
3115 result is the XOR of the first operand with the inside of the
3116 negation of the second operand. */
3118 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3119 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3120 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3122 t = build2 (TRUTH_XOR_EXPR, type,
3123 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3124 TREE_OPERAND (arg, 1));
3127 case TRUTH_ANDIF_EXPR:
3128 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3129 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3130 if (loc1 == UNKNOWN_LOCATION)
3132 if (loc2 == UNKNOWN_LOCATION)
3134 t = build2 (TRUTH_ORIF_EXPR, type,
3135 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3136 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3139 case TRUTH_ORIF_EXPR:
3140 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3141 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3142 if (loc1 == UNKNOWN_LOCATION)
3144 if (loc2 == UNKNOWN_LOCATION)
3146 t = build2 (TRUTH_ANDIF_EXPR, type,
3147 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3148 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3151 case TRUTH_NOT_EXPR:
3152 return TREE_OPERAND (arg, 0);
3156 tree arg1 = TREE_OPERAND (arg, 1);
3157 tree arg2 = TREE_OPERAND (arg, 2);
3159 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3160 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3161 if (loc1 == UNKNOWN_LOCATION)
3163 if (loc2 == UNKNOWN_LOCATION)
3166 /* A COND_EXPR may have a throw as one operand, which
3167 then has void type. Just leave void operands
3169 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3170 VOID_TYPE_P (TREE_TYPE (arg1))
3171 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3172 VOID_TYPE_P (TREE_TYPE (arg2))
3173 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3178 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3179 if (loc1 == UNKNOWN_LOCATION)
3181 t = build2 (COMPOUND_EXPR, type,
3182 TREE_OPERAND (arg, 0),
3183 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3186 case NON_LVALUE_EXPR:
3187 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3188 if (loc1 == UNKNOWN_LOCATION)
3190 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3193 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3195 t = build1 (TRUTH_NOT_EXPR, type, arg);
3199 /* ... fall through ... */
3202 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3203 if (loc1 == UNKNOWN_LOCATION)
3205 t = build1 (TREE_CODE (arg), type,
3206 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3210 if (!integer_onep (TREE_OPERAND (arg, 1)))
3212 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3216 t = build1 (TRUTH_NOT_EXPR, type, arg);
3219 case CLEANUP_POINT_EXPR:
3220 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3221 if (loc1 == UNKNOWN_LOCATION)
3223 t = build1 (CLEANUP_POINT_EXPR, type,
3224 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3233 SET_EXPR_LOCATION (t, loc);
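/* Worked examples (illustrative): the negation is pushed inward with
   De Morgan's laws and comparison inversion, e.g.

     !(a && b)      ->  !a || !b
     !(x < y)       ->  x >= y          (integer operands)
     !(p ? a : b)   ->  p ? !a : !b     (non-void arms)

   For a floating-point "<" under -ftrapping-math the function gives up
   (the caller then wraps a TRUTH_NOT_EXPR around the operand), because
   rewriting the comparison could change its behaviour on unordered
   operands.  */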
3238 /* Return a simplified tree node for the truth-negation of ARG. This
3239 never alters ARG itself. We assume that ARG is an operation that
3240 returns a truth value (0 or 1).
3242 FIXME: one would think we would fold the result, but it causes
3243 problems with the dominator optimizer. */
3246 invert_truthvalue_loc (location_t loc, tree arg)
3250 if (TREE_CODE (arg) == ERROR_MARK)
3253 tem = fold_truth_not_expr (loc, arg);
3256 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3257 SET_EXPR_LOCATION (tem, loc);
3263 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3264 operands are another bit-wise operation with a common input. If so,
3265 distribute the bit operations to save an operation and possibly two if
3266 constants are involved. For example, convert
3267 (A | B) & (A | C) into A | (B & C)
3268 Further simplification will occur if B and C are constants.
3270 If this optimization cannot be done, 0 will be returned. */
3273 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3274 tree arg0, tree arg1)
3279 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3280 || TREE_CODE (arg0) == code
3281 || (TREE_CODE (arg0) != BIT_AND_EXPR
3282 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3285 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3287 common = TREE_OPERAND (arg0, 0);
3288 left = TREE_OPERAND (arg0, 1);
3289 right = TREE_OPERAND (arg1, 1);
3291 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3293 common = TREE_OPERAND (arg0, 0);
3294 left = TREE_OPERAND (arg0, 1);
3295 right = TREE_OPERAND (arg1, 0);
3297 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3299 common = TREE_OPERAND (arg0, 1);
3300 left = TREE_OPERAND (arg0, 0);
3301 right = TREE_OPERAND (arg1, 1);
3303 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3305 common = TREE_OPERAND (arg0, 1);
3306 left = TREE_OPERAND (arg0, 0);
3307 right = TREE_OPERAND (arg1, 0);
3312 common = fold_convert_loc (loc, type, common);
3313 left = fold_convert_loc (loc, type, left);
3314 right = fold_convert_loc (loc, type, right);
3315 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3316 fold_build2_loc (loc, code, type, left, right));
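/* Worked example (illustrative): with constant operands the saving is
   immediate, e.g. for CODE == BIT_AND_EXPR

     (x | 3) & (x | 5)  ->  x | (3 & 5)  ->  x | 1

   replacing two IORs and an AND by a single IOR once the inner
   constant expression is folded.  */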
3319 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3320 with code CODE. This optimization is unsafe. */
3322 distribute_real_division (location_t loc, enum tree_code code, tree type,
3323 tree arg0, tree arg1)
3325 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3326 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3328 /* (A / C) +- (B / C) -> (A +- B) / C. */
3330 && operand_equal_p (TREE_OPERAND (arg0, 1),
3331 TREE_OPERAND (arg1, 1), 0))
3332 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3333 fold_build2_loc (loc, code, type,
3334 TREE_OPERAND (arg0, 0),
3335 TREE_OPERAND (arg1, 0)),
3336 TREE_OPERAND (arg0, 1));
3338 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3339 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3340 TREE_OPERAND (arg1, 0), 0)
3341 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3342 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3344 REAL_VALUE_TYPE r0, r1;
3345 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3346 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3348 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3350 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3351 real_arithmetic (&r0, code, &r0, &r1);
3352 return fold_build2_loc (loc, MULT_EXPR, type,
3353 TREE_OPERAND (arg0, 0),
3354 build_real (type, r0));
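/* Worked examples (illustrative; both rewrites change rounding and are
   therefore only valid under the unsafe-math assumptions noted above):

     (a / c) + (b / c)      ->  (a + b) / c
     (x / 2.0) + (x / 4.0)  ->  x * (1/2.0 + 1/4.0)  ->  x * 0.75

   The second form trades two divisions for one multiplication by a
   compile-time constant.  */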
3360 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3361 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3364 make_bit_field_ref (location_t loc, tree inner, tree type,
3365 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3367 tree result, bftype;
3371 tree size = TYPE_SIZE (TREE_TYPE (inner));
3372 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3373 || POINTER_TYPE_P (TREE_TYPE (inner)))
3374 && host_integerp (size, 0)
3375 && tree_low_cst (size, 0) == bitsize)
3376 return fold_convert_loc (loc, type, inner);
3380 if (TYPE_PRECISION (bftype) != bitsize
3381 || TYPE_UNSIGNED (bftype) == !unsignedp)
3382 bftype = build_nonstandard_integer_type (bitsize, 0);
3384 result = build3 (BIT_FIELD_REF, bftype, inner,
3385 size_int (bitsize), bitsize_int (bitpos));
3386 SET_EXPR_LOCATION (result, loc);
3389 result = fold_convert_loc (loc, type, result);
3394 /* Optimize a bit-field compare.
3396 There are two cases: First is a compare against a constant and the
3397 second is a comparison of two items where the fields are at the same
3398 bit position relative to the start of a chunk (byte, halfword, word)
3399 large enough to contain it. In these cases we can avoid the shift
3400 implicit in bitfield extractions.
3402 For constants, we emit a compare of the shifted constant with the
3403 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3404 compared. For two fields at the same position, we do the ANDs with the
3405 similar mask and compare the result of the ANDs.
3407 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3408 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3409 are the left and right operands of the comparison, respectively.
3411 If the optimization described above can be done, we return the resulting
3412 tree. Otherwise we return zero. */
3415 optimize_bit_field_compare (location_t loc, enum tree_code code,
3416 tree compare_type, tree lhs, tree rhs)
3418 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3419 tree type = TREE_TYPE (lhs);
3420 tree signed_type, unsigned_type;
3421 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3422 enum machine_mode lmode, rmode, nmode;
3423 int lunsignedp, runsignedp;
3424 int lvolatilep = 0, rvolatilep = 0;
3425 tree linner, rinner = NULL_TREE;
3429 /* Get all the information about the extractions being done. If the bit size
3430 is the same as the size of the underlying object, we aren't doing an
3431 extraction at all and so can do nothing. We also don't want to
3432 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3433 then will no longer be able to replace it. */
3434 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3435 &lunsignedp, &lvolatilep, false);
3436 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3437 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3442 /* If this is not a constant, we can only do something if bit positions,
3443 sizes, and signedness are the same. */
3444 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3445 &runsignedp, &rvolatilep, false);
3447 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3448 || lunsignedp != runsignedp || offset != 0
3449 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3453 /* See if we can find a mode to refer to this field. We should be able to,
3454 but fail if we can't. */
3456 && GET_MODE_BITSIZE (lmode) > 0
3457 && flag_strict_volatile_bitfields > 0)
3460 nmode = get_best_mode (lbitsize, lbitpos,
3461 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3462 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3463 TYPE_ALIGN (TREE_TYPE (rinner))),
3464 word_mode, lvolatilep || rvolatilep);
3465 if (nmode == VOIDmode)
3468 /* Set signed and unsigned types of the precision of this mode for the
3470 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3471 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3473 /* Compute the bit position and size for the new reference and our offset
3474 within it. If the new reference is the same size as the original, we
3475 won't optimize anything, so return zero. */
3476 nbitsize = GET_MODE_BITSIZE (nmode);
3477 nbitpos = lbitpos & ~ (nbitsize - 1);
3479 if (nbitsize == lbitsize)
3482 if (BYTES_BIG_ENDIAN)
3483 lbitpos = nbitsize - lbitsize - lbitpos;
3485 /* Make the mask to be used against the extracted field. */
3486 mask = build_int_cst_type (unsigned_type, -1);
3487 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3488 mask = const_binop (RSHIFT_EXPR, mask,
3489 size_int (nbitsize - lbitsize - lbitpos));
3492 /* If not comparing with constant, just rework the comparison
3494 return fold_build2_loc (loc, code, compare_type,
3495 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3496 make_bit_field_ref (loc, linner,
3501 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3502 make_bit_field_ref (loc, rinner,
3508 /* Otherwise, we are handling the constant case. See if the constant is too
3509 big for the field. Warn and return a tree for 0 (false) if so. We do
3510 this not only for its own sake, but to avoid having to test for this
3511 error case below. If we didn't, we might generate wrong code.
3513 For unsigned fields, the constant shifted right by the field length should
3514 be all zero. For signed fields, the high-order bits should agree with
3519 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3520 fold_convert_loc (loc,
3521 unsigned_type, rhs),
3522 size_int (lbitsize))))
3524 warning (0, "comparison is always %d due to width of bit-field",
3526 return constant_boolean_node (code == NE_EXPR, compare_type);
3531 tree tem = const_binop (RSHIFT_EXPR,
3532 fold_convert_loc (loc, signed_type, rhs),
3533 size_int (lbitsize - 1));
3534 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3536 warning (0, "comparison is always %d due to width of bit-field",
3538 return constant_boolean_node (code == NE_EXPR, compare_type);
3542 /* Single-bit compares should always be against zero. */
3543 if (lbitsize == 1 && ! integer_zerop (rhs))
3545 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3546 rhs = build_int_cst (type, 0);
3549 /* Make a new bitfield reference, shift the constant over the
3550 appropriate number of bits and mask it with the computed mask
3551 (in case this was a signed field). If we changed it, make a new one. */
3552 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3555 TREE_SIDE_EFFECTS (lhs) = 1;
3556 TREE_THIS_VOLATILE (lhs) = 1;
3559 rhs = const_binop (BIT_AND_EXPR,
3560 const_binop (LSHIFT_EXPR,
3561 fold_convert_loc (loc, unsigned_type, rhs),
3562 size_int (lbitpos)),
3565 lhs = build2 (code, compare_type,
3566 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3568 SET_EXPR_LOCATION (lhs, loc);
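/* Worked example (illustrative): given

     struct S { unsigned int a : 3; unsigned int b : 9; } s;

   the test "s.b == 7" does not need a real bit-field extraction.  On a
   little-endian target where both fields share one 32-bit word, the
   function rewrites it roughly as

     (WORD & (0x1ff << 3)) == (7 << 3)

   where WORD is the BIT_FIELD_REF built by make_bit_field_ref above;
   the exact shift counts depend on BYTES_BIG_ENDIAN and on the mode
   chosen by get_best_mode.  */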
3572 /* Subroutine for fold_truthop: decode a field reference.
3574 If EXP is a component reference, we return the innermost reference.
3576 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3577 set to the starting bit number.
3579 If the innermost field can be completely contained in a mode-sized
3580 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3582 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3583 otherwise it is not changed.
3585 *PUNSIGNEDP is set to the signedness of the field.
3587 *PMASK is set to the mask used. This is either contained in a
3588 BIT_AND_EXPR or derived from the width of the field.
3590 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3592 Return 0 if this is not a component reference or is one that we can't
3593 do anything with. */
3596 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3597 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3598 int *punsignedp, int *pvolatilep,
3599 tree *pmask, tree *pand_mask)
3601 tree outer_type = 0;
3603 tree mask, inner, offset;
3605 unsigned int precision;
3607 /* All the optimizations using this function assume integer fields.
3608 There are problems with FP fields since the type_for_size call
3609 below can fail for, e.g., XFmode. */
3610 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3613 /* We are interested in the bare arrangement of bits, so strip everything
3614 that doesn't affect the machine mode. However, record the type of the
3615 outermost expression if it may matter below. */
3616 if (CONVERT_EXPR_P (exp)
3617 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3618 outer_type = TREE_TYPE (exp);
3621 if (TREE_CODE (exp) == BIT_AND_EXPR)
3623 and_mask = TREE_OPERAND (exp, 1);
3624 exp = TREE_OPERAND (exp, 0);
3625 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3626 if (TREE_CODE (and_mask) != INTEGER_CST)
3630 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3631 punsignedp, pvolatilep, false);
3632 if ((inner == exp && and_mask == 0)
3633 || *pbitsize < 0 || offset != 0
3634 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3637 /* If the number of bits in the reference is the same as the bitsize of
3638 the outer type, then the outer type gives the signedness. Otherwise
3639 (in case of a small bitfield) the signedness is unchanged. */
3640 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3641 *punsignedp = TYPE_UNSIGNED (outer_type);
3643 /* Compute the mask to access the bitfield. */
3644 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3645 precision = TYPE_PRECISION (unsigned_type);
3647 mask = build_int_cst_type (unsigned_type, -1);
3649 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3650 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3652 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3654 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3655 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3658 *pand_mask = and_mask;
3662 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3666 all_ones_mask_p (const_tree mask, int size)
3668 tree type = TREE_TYPE (mask);
3669 unsigned int precision = TYPE_PRECISION (type);
3672 tmask = build_int_cst_type (signed_type_for (type), -1);
3675 tree_int_cst_equal (mask,
3676 const_binop (RSHIFT_EXPR,
3677 const_binop (LSHIFT_EXPR, tmask,
3678 size_int (precision - size)),
3679 size_int (precision - size)));
3682 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3683 represents the sign bit of EXP's type. If EXP represents a sign
3684 or zero extension, also test VAL against the unextended type.
3685 The return value is the (sub)expression whose sign bit is VAL,
3686 or NULL_TREE otherwise. */
3689 sign_bit_p (tree exp, const_tree val)
3691 unsigned HOST_WIDE_INT mask_lo, lo;
3692 HOST_WIDE_INT mask_hi, hi;
3696 /* Tree EXP must have an integral type. */
3697 t = TREE_TYPE (exp);
3698 if (! INTEGRAL_TYPE_P (t))
3701 /* Tree VAL must be an integer constant. */
3702 if (TREE_CODE (val) != INTEGER_CST
3703 || TREE_OVERFLOW (val))
3706 width = TYPE_PRECISION (t);
3707 if (width > HOST_BITS_PER_WIDE_INT)
3709 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3712 mask_hi = ((unsigned HOST_WIDE_INT) -1
3713 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3719 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3722 mask_lo = ((unsigned HOST_WIDE_INT) -1
3723 >> (HOST_BITS_PER_WIDE_INT - width));
3726 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3727 treat VAL as if it were unsigned. */
3728 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3729 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3732 /* Handle extension from a narrower type. */
3733 if (TREE_CODE (exp) == NOP_EXPR
3734 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3735 return sign_bit_p (TREE_OPERAND (exp, 0), val);
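/* Example (illustrative): for a 32-bit integer type the sign-bit
   constant is 0x80000000, so sign_bit_p (x, val) returns x when VAL
   has that value; and for EXP = (int) c with c of a narrower type, the
   recursion just above retests VAL against the sign bit of the
   unextended type.  */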
3740 /* Subroutine for fold_truthop: determine if an operand is simple enough
3741 to be evaluated unconditionally. */
3744 simple_operand_p (const_tree exp)
3746 /* Strip any conversions that don't change the machine mode. */
3749 return (CONSTANT_CLASS_P (exp)
3750 || TREE_CODE (exp) == SSA_NAME
3752 && ! TREE_ADDRESSABLE (exp)
3753 && ! TREE_THIS_VOLATILE (exp)
3754 && ! DECL_NONLOCAL (exp)
3755 /* Don't regard global variables as simple. They may be
3756 allocated in ways unknown to the compiler (shared memory,
3757 #pragma weak, etc). */
3758 && ! TREE_PUBLIC (exp)
3759 && ! DECL_EXTERNAL (exp)
3760 /* Loading a static variable is unduly expensive, but global
3761 registers aren't expensive. */
3762 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3765 /* The following functions are subroutines to fold_range_test and allow it to
3766 try to change a logical combination of comparisons into a range test.
3769 X == 2 || X == 3 || X == 4 || X == 5
3773 (unsigned) (X - 2) <= 3
3775 We describe each set of comparisons as being either inside or outside
3776 a range, using a variable named like IN_P, and then describe the
3777 range with a lower and upper bound. If one of the bounds is omitted,
3778 it represents either the highest or lowest value of the type.
3780 In the comments below, we represent a range by two numbers in brackets
3781 preceded by a "+" to designate being inside that range, or a "-" to
3782 designate being outside that range, so the condition can be inverted by
3783 flipping the prefix. An omitted bound is represented by a "-". For
3784 example, "- [-, 10]" means being outside the range starting at the lowest
3785 possible value and ending at 10, in other words, being greater than 10.
3786 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3789 We set up things so that the missing bounds are handled in a consistent
3790 manner so neither a missing bound nor "true" and "false" need to be
3791 handled using a special case. */
3793 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3794 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3795 and UPPER1_P are nonzero if the respective argument is an upper bound
3796 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3797 must be specified for a comparison. ARG1 will be converted to ARG0's
3798 type if both are specified. */
3801 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3802 tree arg1, int upper1_p)
3808 /* If neither arg represents infinity, do the normal operation.
3809 Else, if not a comparison, return infinity. Else handle the special
3810 comparison rules. Note that most of the cases below won't occur, but
3811 are handled for consistency. */
3813 if (arg0 != 0 && arg1 != 0)
3815 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3816 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3818 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3821 if (TREE_CODE_CLASS (code) != tcc_comparison)
3824 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3825 for neither. In real maths, we cannot assume open ended ranges are
3826 the same. But, this is computer arithmetic, where numbers are finite.
3827 We can therefore make the transformation of any unbounded range with
3828 the value Z, Z being greater than any representable number. This permits
3829 us to treat unbounded ranges as equal. */
3830 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3831 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3835 result = sgn0 == sgn1;
3838 result = sgn0 != sgn1;
3841 result = sgn0 < sgn1;
3844 result = sgn0 <= sgn1;
3847 result = sgn0 > sgn1;
3850 result = sgn0 >= sgn1;
3856 return constant_boolean_node (result, type);
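/* Worked example (illustrative): a null bound stands for an infinity,
   so the hypothetical call

     range_binop (LE_EXPR, integer_type_node, NULL_TREE, 1, val, 0)

   asks "is +infinity <= VAL?" and yields boolean false for any finite
   non-null VAL, while a call with two non-null INTEGER_CST arguments
   simply folds the comparison and returns the resulting constant (or 0
   if it did not fold to a constant).  */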
3859 /* Given EXP, a logical expression, set the range it is testing into
3860 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3861 actually being tested. *PLOW and *PHIGH will be made of the same
3862 type as the returned expression. If EXP is not a comparison, we
3863 will most likely not be returning a useful value and range. Set
3864 *STRICT_OVERFLOW_P to true if the return value is only valid
3865 because signed overflow is undefined; otherwise, do not change
3866 *STRICT_OVERFLOW_P. */
3869 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3870 bool *strict_overflow_p)
3872 enum tree_code code;
3873 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3874 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3876 tree low, high, n_low, n_high;
3877 location_t loc = EXPR_LOCATION (exp);
3879 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3880 and see if we can refine the range. Some of the cases below may not
3881 happen, but it doesn't seem worth worrying about this. We "continue"
3882 the outer loop when we've changed something; otherwise we "break"
3883 the switch, which will "break" the while. */
3886 low = high = build_int_cst (TREE_TYPE (exp), 0);
3890 code = TREE_CODE (exp);
3891 exp_type = TREE_TYPE (exp);
3893 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3895 if (TREE_OPERAND_LENGTH (exp) > 0)
3896 arg0 = TREE_OPERAND (exp, 0);
3897 if (TREE_CODE_CLASS (code) == tcc_comparison
3898 || TREE_CODE_CLASS (code) == tcc_unary
3899 || TREE_CODE_CLASS (code) == tcc_binary)
3900 arg0_type = TREE_TYPE (arg0);
3901 if (TREE_CODE_CLASS (code) == tcc_binary
3902 || TREE_CODE_CLASS (code) == tcc_comparison
3903 || (TREE_CODE_CLASS (code) == tcc_expression
3904 && TREE_OPERAND_LENGTH (exp) > 1))
3905 arg1 = TREE_OPERAND (exp, 1);
3910 case TRUTH_NOT_EXPR:
3911 in_p = ! in_p, exp = arg0;
3914 case EQ_EXPR: case NE_EXPR:
3915 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3916 /* We can only do something if the range is testing for zero
3917 and if the second operand is an integer constant. Note that
3918 saying something is "in" the range we make is done by
3919 complementing IN_P since it will be set in the initial case of
3920 being not equal to zero; "out" is leaving it alone. */
3921 if (low == 0 || high == 0
3922 || ! integer_zerop (low) || ! integer_zerop (high)
3923 || TREE_CODE (arg1) != INTEGER_CST)
3928 case NE_EXPR: /* - [c, c] */
3931 case EQ_EXPR: /* + [c, c] */
3932 in_p = ! in_p, low = high = arg1;
3934 case GT_EXPR: /* - [-, c] */
3935 low = 0, high = arg1;
3937 case GE_EXPR: /* + [c, -] */
3938 in_p = ! in_p, low = arg1, high = 0;
3940 case LT_EXPR: /* - [c, -] */
3941 low = arg1, high = 0;
3943 case LE_EXPR: /* + [-, c] */
3944 in_p = ! in_p, low = 0, high = arg1;
3950 /* If this is an unsigned comparison, we also know that EXP is
3951 greater than or equal to zero. We base the range tests we make
3952 on that fact, so we record it here so we can parse existing
3953 range tests. We test arg0_type since often the return type
3954 of, e.g. EQ_EXPR, is boolean. */
3955 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3957 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3959 build_int_cst (arg0_type, 0),
3963 in_p = n_in_p, low = n_low, high = n_high;
3965 /* If the high bound is missing, but we have a nonzero low
3966 bound, reverse the range so it goes from zero to the low bound
3968 if (high == 0 && low && ! integer_zerop (low))
3971 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3972 integer_one_node, 0);
3973 low = build_int_cst (arg0_type, 0);
3981 /* (-x) IN [a,b] -> x in [-b, -a] */
3982 n_low = range_binop (MINUS_EXPR, exp_type,
3983 build_int_cst (exp_type, 0),
3985 n_high = range_binop (MINUS_EXPR, exp_type,
3986 build_int_cst (exp_type, 0),
3988 low = n_low, high = n_high;
3994 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3995 build_int_cst (exp_type, 1));
3996 SET_EXPR_LOCATION (exp, loc);
3999 case PLUS_EXPR: case MINUS_EXPR:
4000 if (TREE_CODE (arg1) != INTEGER_CST)
4003 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4004 move a constant to the other side. */
4005 if (!TYPE_UNSIGNED (arg0_type)
4006 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4009 /* If EXP is signed, any overflow in the computation is undefined,
4010 so we don't worry about it so long as our computations on
4011 the bounds don't overflow. For unsigned, overflow is defined
4012 and this is exactly the right thing. */
4013 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4014 arg0_type, low, 0, arg1, 0);
4015 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4016 arg0_type, high, 1, arg1, 0);
4017 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4018 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4021 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4022 *strict_overflow_p = true;
4024 /* Check for an unsigned range which has wrapped around the maximum
4025 value thus making n_high < n_low, and normalize it. */
4026 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4028 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4029 integer_one_node, 0);
4030 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4031 integer_one_node, 0);
4033 /* If the range is of the form +/- [ x+1, x ], we won't
4034 be able to normalize it. But then, it represents the
4035 whole range or the empty set, so make it
4037 if (tree_int_cst_equal (n_low, low)
4038 && tree_int_cst_equal (n_high, high))
4044 low = n_low, high = n_high;
4049 CASE_CONVERT: case NON_LVALUE_EXPR:
4050 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4053 if (! INTEGRAL_TYPE_P (arg0_type)
4054 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4055 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4058 n_low = low, n_high = high;
4061 n_low = fold_convert_loc (loc, arg0_type, n_low);
4064 n_high = fold_convert_loc (loc, arg0_type, n_high);
4067 /* If we're converting arg0 from an unsigned type, to exp,
4068 a signed type, we will be doing the comparison as unsigned.
4069 The tests above have already verified that LOW and HIGH
4072 So we have to ensure that we will handle large unsigned
4073 values the same way that the current signed bounds treat
4076 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4080 /* For fixed-point modes, we need to pass the saturating flag
4081 as the 2nd parameter. */
4082 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4083 equiv_type = lang_hooks.types.type_for_mode
4084 (TYPE_MODE (arg0_type),
4085 TYPE_SATURATING (arg0_type));
4087 equiv_type = lang_hooks.types.type_for_mode
4088 (TYPE_MODE (arg0_type), 1);
4090 /* A range without an upper bound is, naturally, unbounded.
4091 Since convert would have cropped a very large value, use
4092 the max value for the destination type. */
4094 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4095 : TYPE_MAX_VALUE (arg0_type);
4097 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4098 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4099 fold_convert_loc (loc, arg0_type,
4101 build_int_cst (arg0_type, 1));
4103 /* If the low bound is specified, "and" the range with the
4104 range for which the original unsigned value will be
4108 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4109 1, n_low, n_high, 1,
4110 fold_convert_loc (loc, arg0_type,
4115 in_p = (n_in_p == in_p);
4119 /* Otherwise, "or" the range with the range of the input
4120 that will be interpreted as negative. */
4121 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4122 0, n_low, n_high, 1,
4123 fold_convert_loc (loc, arg0_type,
4128 in_p = (in_p != n_in_p);
4133 low = n_low, high = n_high;
4143 /* If EXP is a constant, we can evaluate whether this is true or false. */
4144 if (TREE_CODE (exp) == INTEGER_CST)
4146 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4148 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4154 *pin_p = in_p, *plow = low, *phigh = high;
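/* Worked example (illustrative): for EXP = "x > 10" with x signed,
   make_range returns x and sets *PIN_P = 0, *PLOW = NULL, *PHIGH = 10,
   i.e. the "- [-, 10]" form described before range_binop: x lies
   outside the range from the type's minimum up to 10.  For
   "x + 2 <= 5" the constant is moved across, giving x with "+ [-, 3]",
   and *STRICT_OVERFLOW_P is set because that step relies on signed
   overflow being undefined.  */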
4158 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4159 type, TYPE, return an expression to test if EXP is in (or out of, depending
4160 on IN_P) the range. Return 0 if the test couldn't be created. */
4163 build_range_check (location_t loc, tree type, tree exp, int in_p,
4164 tree low, tree high)
4166 tree etype = TREE_TYPE (exp), value;
4168 #ifdef HAVE_canonicalize_funcptr_for_compare
4169 /* Disable this optimization for function pointer expressions
4170 on targets that require function pointer canonicalization. */
4171 if (HAVE_canonicalize_funcptr_for_compare
4172 && TREE_CODE (etype) == POINTER_TYPE
4173 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4179 value = build_range_check (loc, type, exp, 1, low, high);
4181 return invert_truthvalue_loc (loc, value);
4186 if (low == 0 && high == 0)
4187 return build_int_cst (type, 1);
4190 return fold_build2_loc (loc, LE_EXPR, type, exp,
4191 fold_convert_loc (loc, etype, high));
4194 return fold_build2_loc (loc, GE_EXPR, type, exp,
4195 fold_convert_loc (loc, etype, low));
4197 if (operand_equal_p (low, high, 0))
4198 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4199 fold_convert_loc (loc, etype, low));
4201 if (integer_zerop (low))
4203 if (! TYPE_UNSIGNED (etype))
4205 etype = unsigned_type_for (etype);
4206 high = fold_convert_loc (loc, etype, high);
4207 exp = fold_convert_loc (loc, etype, exp);
4209 return build_range_check (loc, type, exp, 1, 0, high);
4212 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4213 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4215 unsigned HOST_WIDE_INT lo;
4219 prec = TYPE_PRECISION (etype);
4220 if (prec <= HOST_BITS_PER_WIDE_INT)
4223 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4227 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4228 lo = (unsigned HOST_WIDE_INT) -1;
4231 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4233 if (TYPE_UNSIGNED (etype))
4235 tree signed_etype = signed_type_for (etype);
4236 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4238 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4240 etype = signed_etype;
4241 exp = fold_convert_loc (loc, etype, exp);
4243 return fold_build2_loc (loc, GT_EXPR, type, exp,
4244 build_int_cst (etype, 0));
4248 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4249 This requires wrap-around arithmetic for the type of the expression.
4250 First make sure that arithmetic in this type is valid, then make sure
4251 that it wraps around. */
4252 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4253 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4254 TYPE_UNSIGNED (etype));
4256 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4258 tree utype, minv, maxv;
4260 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4261 for the type in question, as we rely on this here. */
4262 utype = unsigned_type_for (etype);
4263 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4264 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4265 integer_one_node, 1);
4266 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4268 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4275 high = fold_convert_loc (loc, etype, high);
4276 low = fold_convert_loc (loc, etype, low);
4277 exp = fold_convert_loc (loc, etype, exp);
4279 value = const_binop (MINUS_EXPR, high, low);
4282 if (POINTER_TYPE_P (etype))
4284 if (value != 0 && !TREE_OVERFLOW (value))
4286 low = fold_convert_loc (loc, sizetype, low);
4287 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4288 return build_range_check (loc, type,
4289 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4291 1, build_int_cst (etype, 0), value);
4296 if (value != 0 && !TREE_OVERFLOW (value))
4297 return build_range_check (loc, type,
4298 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4299 1, build_int_cst (etype, 0), value);
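/* Worked example (illustrative): asking for a test that signed x is
   inside [2, 5],

     build_range_check (loc, boolean_type_node, x, 1,
                        build_int_cst (integer_type_node, 2),
                        build_int_cst (integer_type_node, 5));

   takes the subtract-the-low-bound path and yields the single unsigned
   comparison "(unsigned) (x - 2) <= 3", matching the range-test
   example given before range_binop.  */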
4304 /* Return the predecessor of VAL in its type, handling the infinite case. */
4307 range_predecessor (tree val)
4309 tree type = TREE_TYPE (val);
4311 if (INTEGRAL_TYPE_P (type)
4312 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4315 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4318 /* Return the successor of VAL in its type, handling the infinite case. */
4321 range_successor (tree val)
4323 tree type = TREE_TYPE (val);
4325 if (INTEGRAL_TYPE_P (type)
4326 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4329 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4332 /* Given two ranges, see if we can merge them into one. Return 1 if we
4333 can, 0 if we can't. Set the output range into the specified parameters. */
4336 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4337 tree high0, int in1_p, tree low1, tree high1)
4345 int lowequal = ((low0 == 0 && low1 == 0)
4346 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4347 low0, 0, low1, 0)));
4348 int highequal = ((high0 == 0 && high1 == 0)
4349 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4350 high0, 1, high1, 1)));
4352 /* Make range 0 be the range that starts first, or ends last if they
4353 start at the same value. Swap them if it isn't. */
4354 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4357 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4358 high1, 1, high0, 1))))
4360 temp = in0_p, in0_p = in1_p, in1_p = temp;
4361 tem = low0, low0 = low1, low1 = tem;
4362 tem = high0, high0 = high1, high1 = tem;
4365 /* Now flag two cases, whether the ranges are disjoint or whether the
4366 second range is totally subsumed in the first. Note that the tests
4367 below are simplified by the ones above. */
4368 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4369 high0, 1, low1, 0));
4370 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4371 high1, 1, high0, 1));
4373 /* We now have four cases, depending on whether we are including or
4374 excluding the two ranges. */
4377 /* If they don't overlap, the result is false. If the second range
4378 is a subset it is the result. Otherwise, the range is from the start
4379 of the second to the end of the first. */
4381 in_p = 0, low = high = 0;
4383 in_p = 1, low = low1, high = high1;
4385 in_p = 1, low = low1, high = high0;
4388 else if (in0_p && ! in1_p)
4390 /* If they don't overlap, the result is the first range. If they are
4391 equal, the result is false. If the second range is a subset of the
4392 first, and the ranges begin at the same place, we go from just after
4393 the end of the second range to the end of the first. If the second
4394 range is not a subset of the first, or if it is a subset and both
4395 ranges end at the same place, the range starts at the start of the
4396 first range and ends just before the second range.
4397 Otherwise, we can't describe this as a single range. */
4399 in_p = 1, low = low0, high = high0;
4400 else if (lowequal && highequal)
4401 in_p = 0, low = high = 0;
4402 else if (subset && lowequal)
4404 low = range_successor (high1);
4409 /* We are in the weird situation where high0 > high1 but
4410 high1 has no successor. Punt. */
4414 else if (! subset || highequal)
4417 high = range_predecessor (low1);
4421 /* low0 < low1 but low1 has no predecessor. Punt. */
4429 else if (! in0_p && in1_p)
4431 /* If they don't overlap, the result is the second range. If the second
4432 is a subset of the first, the result is false. Otherwise,
4433 the range starts just after the first range and ends at the
4434 end of the second. */
4436 in_p = 1, low = low1, high = high1;
4437 else if (subset || highequal)
4438 in_p = 0, low = high = 0;
4441 low = range_successor (high0);
4446 /* high1 > high0 but high0 has no successor. Punt. */
4454 /* The case where we are excluding both ranges. Here the complex case
4455 is if they don't overlap. In that case, the only time we have a
4456 range is if they are adjacent. If the second is a subset of the
4457 first, the result is the first. Otherwise, the range to exclude
4458 starts at the beginning of the first range and ends at the end of the
4462 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4463 range_successor (high0),
4465 in_p = 0, low = low0, high = high1;
4468 /* Canonicalize - [min, x] into - [-, x]. */
4469 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4470 switch (TREE_CODE (TREE_TYPE (low0)))
4473 if (TYPE_PRECISION (TREE_TYPE (low0))
4474 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4478 if (tree_int_cst_equal (low0,
4479 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4483 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4484 && integer_zerop (low0))
4491 /* Canonicalize - [x, max] into - [x, -]. */
4492 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4493 switch (TREE_CODE (TREE_TYPE (high1)))
4496 if (TYPE_PRECISION (TREE_TYPE (high1))
4497 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4501 if (tree_int_cst_equal (high1,
4502 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4506 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4507 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4509 integer_one_node, 1)))
4516 /* The ranges might also be adjacent between the maximum and
4517 minimum values of the given type. For
4518 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4519 return + [x + 1, y - 1]. */
4520 if (low0 == 0 && high1 == 0)
4522 low = range_successor (high0);
4523 high = range_predecessor (low1);
4524 if (low == 0 || high == 0)
4534 in_p = 0, low = low0, high = high0;
4536 in_p = 0, low = low0, high = high1;
4539 *pin_p = in_p, *plow = low, *phigh = high;
4544 /* Subroutine of fold, looking inside expressions of the form
4545 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4546 of the COND_EXPR. This function is being used also to optimize
4547 A op B ? C : A, by reversing the comparison first.
4549 Return a folded expression whose code is not a COND_EXPR
4550 anymore, or NULL_TREE if no folding opportunity is found. */
4553 fold_cond_expr_with_comparison (location_t loc, tree type,
4554 tree arg0, tree arg1, tree arg2)
4556 enum tree_code comp_code = TREE_CODE (arg0);
4557 tree arg00 = TREE_OPERAND (arg0, 0);
4558 tree arg01 = TREE_OPERAND (arg0, 1);
4559 tree arg1_type = TREE_TYPE (arg1);
4565 /* If we have A op 0 ? A : -A, consider applying the following
4568 A == 0? A : -A same as -A
4569 A != 0? A : -A same as A
4570 A >= 0? A : -A same as abs (A)
4571 A > 0? A : -A same as abs (A)
4572 A <= 0? A : -A same as -abs (A)
4573 A < 0? A : -A same as -abs (A)
4575 None of these transformations work for modes with signed
4576 zeros. If A is +/-0, the first two transformations will
4577 change the sign of the result (from +0 to -0, or vice
4578 versa). The last four will fix the sign of the result,
4579 even though the original expressions could be positive or
4580 negative, depending on the sign of A.
4582 Note that all these transformations are correct if A is
4583 NaN, since the two alternatives (A and -A) are also NaNs. */
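/* A small illustration, using a signed int A where signed zeros are
   not an issue:

     a >  0 ? a : -a   and   a >= 0 ? a : -a   both fold to abs (a)
     a <  0 ? a : -a   and   a <= 0 ? a : -a   both fold to -abs (a)

   For floating point the same rewrites are applied only when the mode
   does not honor signed zeros, as explained above.  */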
4584 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4585 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4586 ? real_zerop (arg01)
4587 : integer_zerop (arg01))
4588 && ((TREE_CODE (arg2) == NEGATE_EXPR
4589 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4590 /* In the case that A is of the form X-Y, '-A' (arg2) may
4591 have already been folded to Y-X, check for that. */
4592 || (TREE_CODE (arg1) == MINUS_EXPR
4593 && TREE_CODE (arg2) == MINUS_EXPR
4594 && operand_equal_p (TREE_OPERAND (arg1, 0),
4595 TREE_OPERAND (arg2, 1), 0)
4596 && operand_equal_p (TREE_OPERAND (arg1, 1),
4597 TREE_OPERAND (arg2, 0), 0))))
4602 tem = fold_convert_loc (loc, arg1_type, arg1);
4603 return pedantic_non_lvalue_loc (loc,
4604 fold_convert_loc (loc, type,
4605 negate_expr (tem)));
4608 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4611 if (flag_trapping_math)
4616 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4617 arg1 = fold_convert_loc (loc, signed_type_for
4618 (TREE_TYPE (arg1)), arg1);
4619 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4620 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4623 if (flag_trapping_math)
4627 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4628 arg1 = fold_convert_loc (loc, signed_type_for
4629 (TREE_TYPE (arg1)), arg1);
4630 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4631 return negate_expr (fold_convert_loc (loc, type, tem));
4633 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4637 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4638 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4639 both transformations are correct when A is NaN: A != 0
4640 is then true, and A == 0 is false. */
4642 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4643 && integer_zerop (arg01) && integer_zerop (arg2))
4645 if (comp_code == NE_EXPR)
4646 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4647 else if (comp_code == EQ_EXPR)
4648 return build_int_cst (type, 0);
4651 /* Try some transformations of A op B ? A : B.
4653 A == B? A : B same as B
4654 A != B? A : B same as A
4655 A >= B? A : B same as max (A, B)
4656 A > B? A : B same as max (B, A)
4657 A <= B? A : B same as min (A, B)
4658 A < B? A : B same as min (B, A)
4660 As above, these transformations don't work in the presence
4661 of signed zeros. For example, if A and B are zeros of
4662 opposite sign, the first two transformations will change
4663 the sign of the result. In the last four, the original
4664 expressions give different results for (A=+0, B=-0) and
4665 (A=-0, B=+0), but the transformed expressions do not.
4667 The first two transformations are correct if either A or B
4668 is a NaN. In the first transformation, the condition will
4669 be false, and B will indeed be chosen. In the case of the
4670 second transformation, the condition A != B will be true,
4671 and A will be chosen.
4673 The conversions to max() and min() are not correct if B is
4674 a number and A is not. The conditions in the original
4675 expressions will be false, so all four give B. The min()
4676 and max() versions would give a NaN instead. */
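/* For instance, with integer operands (so signed zeros and NaNs do
   not apply):

     a <= b ? a : b   folds to   MIN_EXPR <a, b>
     a <  b ? a : b   folds to   MIN_EXPR <b, a>
     a >= b ? a : b   folds to   MAX_EXPR <a, b>
     a >  b ? a : b   folds to   MAX_EXPR <b, a>

   The operand order puts the value selected when the operands compare
   equal first, which matters for the C++ lvalue case handled below.  */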
4677 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4678 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4679 /* Avoid these transformations if the COND_EXPR may be used
4680 as an lvalue in the C++ front-end. PR c++/19199. */
4682 || (strcmp (lang_hooks.name, "GNU C++") != 0
4683 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4684 || ! maybe_lvalue_p (arg1)
4685 || ! maybe_lvalue_p (arg2)))
4687 tree comp_op0 = arg00;
4688 tree comp_op1 = arg01;
4689 tree comp_type = TREE_TYPE (comp_op0);
4691 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4692 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4702 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4704 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4709 /* In C++ a ?: expression can be an lvalue, so put the
4710 operand which will be used if they are equal first
4711 so that we can convert this back to the
4712 corresponding COND_EXPR. */
4713 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4715 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4716 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4717 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4718 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4719 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4720 comp_op1, comp_op0);
4721 return pedantic_non_lvalue_loc (loc,
4722 fold_convert_loc (loc, type, tem));
4729 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4731 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4732 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4733 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4734 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4735 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4736 comp_op1, comp_op0);
4737 return pedantic_non_lvalue_loc (loc,
4738 fold_convert_loc (loc, type, tem));
4742 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4743 return pedantic_non_lvalue_loc (loc,
4744 fold_convert_loc (loc, type, arg2));
4747 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4748 return pedantic_non_lvalue_loc (loc,
4749 fold_convert_loc (loc, type, arg1));
4752 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4757 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4758 we might still be able to simplify this. For example,
4759 if C1 is one less or one more than C2, this might have started
4760 out as a MIN or MAX and been transformed by this function.
4761 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
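/* Two concrete cases of the above, for a signed int X:

     x < 4 ? x : 3   is   min (x, 3)   (C1 == C2 + 1)
     x > 2 ? x : 3   is   max (x, 3)   (C1 == C2 - 1)

   provided C2 is not an extreme value of the type, which the checks
   below guard against.  */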
4763 if (INTEGRAL_TYPE_P (type)
4764 && TREE_CODE (arg01) == INTEGER_CST
4765 && TREE_CODE (arg2) == INTEGER_CST)
4769 if (TREE_CODE (arg1) == INTEGER_CST)
4771 /* We can replace A with C1 in this case. */
4772 arg1 = fold_convert_loc (loc, type, arg01);
4773 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4776 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4777 MIN_EXPR, to preserve the signedness of the comparison. */
4778 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4780 && operand_equal_p (arg01,
4781 const_binop (PLUS_EXPR, arg2,
4782 build_int_cst (type, 1)),
4785 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4786 fold_convert_loc (loc, TREE_TYPE (arg00),
4788 return pedantic_non_lvalue_loc (loc,
4789 fold_convert_loc (loc, type, tem));
4794 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4796 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4798 && operand_equal_p (arg01,
4799 const_binop (MINUS_EXPR, arg2,
4800 build_int_cst (type, 1)),
4803 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4804 fold_convert_loc (loc, TREE_TYPE (arg00),
4806 return pedantic_non_lvalue_loc (loc,
4807 fold_convert_loc (loc, type, tem));
4812 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4813 MAX_EXPR, to preserve the signedness of the comparison. */
4814 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4816 && operand_equal_p (arg01,
4817 const_binop (MINUS_EXPR, arg2,
4818 build_int_cst (type, 1)),
4821 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4822 fold_convert_loc (loc, TREE_TYPE (arg00),
4824 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4829 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4830 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4832 && operand_equal_p (arg01,
4833 const_binop (PLUS_EXPR, arg2,
4834 build_int_cst (type, 1)),
4837 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4838 fold_convert_loc (loc, TREE_TYPE (arg00),
4840 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4854 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4855 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4856 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4860 /* EXP is some logical combination of boolean tests. See if we can
4861 merge it into some range test. Return the new tree if so. */
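/* For example (a sketch; the exact trees depend on the operand types):

     ch >= '0' && ch <= '9'

   is recognized as the single range test "ch in ['0', '9']", and

     ch < '0' || ch > '9'

   as its inversion, so either form can end up as one unsigned
   comparison on ch - '0'.  */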
4864 fold_range_test (location_t loc, enum tree_code code, tree type,
4867 int or_op = (code == TRUTH_ORIF_EXPR
4868 || code == TRUTH_OR_EXPR);
4869 int in0_p, in1_p, in_p;
4870 tree low0, low1, low, high0, high1, high;
4871 bool strict_overflow_p = false;
4872 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4873 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4875 const char * const warnmsg = G_("assuming signed overflow does not occur "
4876 "when simplifying range test");
4878 /* If this is an OR operation, invert both sides; we will invert
4879 again at the end. */
4881 in0_p = ! in0_p, in1_p = ! in1_p;
4883 /* If both expressions are the same and we can merge the ranges and
4884 build the range test, return it, possibly inverted. If one of the
4885 ranges is always true or always false, consider it to be the same
4886 expression as the other. */
4887 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4888 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4890 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
4892 : rhs != 0 ? rhs : integer_zero_node,
4895 if (strict_overflow_p)
4896 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4897 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4900 /* On machines where the branch cost is expensive, if this is a
4901 short-circuited branch and the underlying object on both sides
4902 is the same, make a non-short-circuit operation. */
4903 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4904 && lhs != 0 && rhs != 0
4905 && (code == TRUTH_ANDIF_EXPR
4906 || code == TRUTH_ORIF_EXPR)
4907 && operand_equal_p (lhs, rhs, 0))
4909 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4910 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4911 which cases we can't do this. */
4912 if (simple_operand_p (lhs))
4914 tem = build2 (code == TRUTH_ANDIF_EXPR
4915 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4917 SET_EXPR_LOCATION (tem, loc);
4921 else if (lang_hooks.decls.global_bindings_p () == 0
4922 && ! CONTAINS_PLACEHOLDER_P (lhs))
4924 tree common = save_expr (lhs);
4926 if (0 != (lhs = build_range_check (loc, type, common,
4927 or_op ? ! in0_p : in0_p,
4929 && (0 != (rhs = build_range_check (loc, type, common,
4930 or_op ? ! in1_p : in1_p,
4933 if (strict_overflow_p)
4934 fold_overflow_warning (warnmsg,
4935 WARN_STRICT_OVERFLOW_COMPARISON);
4936 tem = build2 (code == TRUTH_ANDIF_EXPR
4937 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4939 SET_EXPR_LOCATION (tem, loc);
4948 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4949 bit value. Arrange things so the extra bits will be set to zero if and
4950 only if C is sign-extended to its full width. If MASK is nonzero,
4951 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4954 unextend (tree c, int p, int unsignedp, tree mask)
4956 tree type = TREE_TYPE (c);
4957 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4960 if (p == modesize || unsignedp)
4963 /* We work by getting just the sign bit into the low-order bit, then
4964 into the high-order bit, then sign-extend. We then XOR that value
4966 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4967 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4969 /* We must use a signed type in order to get an arithmetic right shift.
4970 However, we must also avoid introducing accidental overflows, so that
4971 a subsequent call to integer_zerop will work. Hence we must
4972 do the type conversion here. At this point, the constant is either
4973 zero or one, and the conversion to a signed type can never overflow.
4974 We could get an overflow if this conversion is done anywhere else. */
4975 if (TYPE_UNSIGNED (type))
4976 temp = fold_convert (signed_type_for (type), temp);
4978 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4979 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4981 temp = const_binop (BIT_AND_EXPR, temp,
4982 fold_convert (TREE_TYPE (c), mask));
4983 /* If necessary, convert the type back to match the type of C. */
4984 if (TYPE_UNSIGNED (type))
4985 temp = fold_convert (type, temp);
4987 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4990 /* For an expression that has the form
4994 we can drop one of the inner expressions and simplify to
4998 LOC is the location of the resulting expression. OP is the inner
4999 logical operation; the left-hand side in the examples above, while CMPOP
5000 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5001 removing a condition that guards another, as in
5002 (A != NULL && A->...) || A == NULL
5003 which we must not transform. If RHS_ONLY is true, only eliminate the
5004 right-most operand of the inner logical operation. */
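/* A hypothetical example of the safe direction:

     (a > 0 && b > 2) || a <= 0

   can drop the "a > 0" term, because it is exactly the inverse of the
   comparison on the other side, giving

     b > 2 || a <= 0

   whereas in the pointer example above the inverse comparison guards
   the dereference and must be kept; RHS_ONLY exists for that case.  */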
5007 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5010 tree type = TREE_TYPE (cmpop);
5011 enum tree_code code = TREE_CODE (cmpop);
5012 enum tree_code truthop_code = TREE_CODE (op);
5013 tree lhs = TREE_OPERAND (op, 0);
5014 tree rhs = TREE_OPERAND (op, 1);
5015 tree orig_lhs = lhs, orig_rhs = rhs;
5016 enum tree_code rhs_code = TREE_CODE (rhs);
5017 enum tree_code lhs_code = TREE_CODE (lhs);
5018 enum tree_code inv_code;
5020 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5023 if (TREE_CODE_CLASS (code) != tcc_comparison)
5026 if (rhs_code == truthop_code)
5028 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5029 if (newrhs != NULL_TREE)
5032 rhs_code = TREE_CODE (rhs);
5035 if (lhs_code == truthop_code && !rhs_only)
5037 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5038 if (newlhs != NULL_TREE)
5041 lhs_code = TREE_CODE (lhs);
5045 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5046 if (inv_code == rhs_code
5047 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5048 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5050 if (!rhs_only && inv_code == lhs_code
5051 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5052 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5054 if (rhs != orig_rhs || lhs != orig_lhs)
5055 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5060 /* Find ways of folding logical expressions of LHS and RHS:
5061 Try to merge two comparisons to the same innermost item.
5062 Look for range tests like "ch >= '0' && ch <= '9'".
5063 Look for combinations of simple terms on machines with expensive branches
5064 and evaluate the RHS unconditionally.
5066 For example, if we have p->a == 2 && p->b == 4 and we can make an
5067 object large enough to span both A and B, we can do this with a comparison
5068 against the object ANDed with a mask.
5070 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5071 operations to do this with one comparison.
5073 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5074 function and the one above.
5076 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5077 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5079 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5082 We return the simplified tree or 0 if no optimization is possible. */
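/* As an illustration (a sketch; the actual layout is target
   dependent): if A and B are adjacent unsigned char fields of *P,

     p->a == 2 && p->b == 4

   may become a single 16-bit access covering both fields, compared
   against the combined constant, whose byte order depends on
   BYTES_BIG_ENDIAN.  */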
5085 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5088 /* If this is the "or" of two comparisons, we can do something if
5089 the comparisons are NE_EXPR. If this is the "and", we can do something
5090 if the comparisons are EQ_EXPR. I.e.,
5091 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5093 WANTED_CODE is this operation code. For single bit fields, we can
5094 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5095 comparison for one-bit fields. */
5097 enum tree_code wanted_code;
5098 enum tree_code lcode, rcode;
5099 tree ll_arg, lr_arg, rl_arg, rr_arg;
5100 tree ll_inner, lr_inner, rl_inner, rr_inner;
5101 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5102 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5103 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5104 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5105 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5106 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5107 enum machine_mode lnmode, rnmode;
5108 tree ll_mask, lr_mask, rl_mask, rr_mask;
5109 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5110 tree l_const, r_const;
5111 tree lntype, rntype, result;
5112 HOST_WIDE_INT first_bit, end_bit;
5114 tree orig_lhs = lhs, orig_rhs = rhs;
5115 enum tree_code orig_code = code;
5117 /* Start by getting the comparison codes. Fail if anything is volatile.
5118 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5119 it were surrounded with a NE_EXPR. */
5121 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5124 lcode = TREE_CODE (lhs);
5125 rcode = TREE_CODE (rhs);
5127 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5129 lhs = build2 (NE_EXPR, truth_type, lhs,
5130 build_int_cst (TREE_TYPE (lhs), 0));
5134 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5136 rhs = build2 (NE_EXPR, truth_type, rhs,
5137 build_int_cst (TREE_TYPE (rhs), 0));
5141 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5142 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5145 ll_arg = TREE_OPERAND (lhs, 0);
5146 lr_arg = TREE_OPERAND (lhs, 1);
5147 rl_arg = TREE_OPERAND (rhs, 0);
5148 rr_arg = TREE_OPERAND (rhs, 1);
5150 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5151 if (simple_operand_p (ll_arg)
5152 && simple_operand_p (lr_arg))
5155 if (operand_equal_p (ll_arg, rl_arg, 0)
5156 && operand_equal_p (lr_arg, rr_arg, 0))
5158 result = combine_comparisons (loc, code, lcode, rcode,
5159 truth_type, ll_arg, lr_arg);
5163 else if (operand_equal_p (ll_arg, rr_arg, 0)
5164 && operand_equal_p (lr_arg, rl_arg, 0))
5166 result = combine_comparisons (loc, code, lcode,
5167 swap_tree_comparison (rcode),
5168 truth_type, ll_arg, lr_arg);
5174 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5175 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5177 /* If the RHS can be evaluated unconditionally and its operands are
5178 simple, it wins to evaluate the RHS unconditionally on machines
5179 with expensive branches. In this case, this isn't a comparison
5180 that can be merged. Avoid doing this if the RHS is a floating-point
5181 comparison since those can trap. */
5183 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5185 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5186 && simple_operand_p (rl_arg)
5187 && simple_operand_p (rr_arg))
5189 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5190 if (code == TRUTH_OR_EXPR
5191 && lcode == NE_EXPR && integer_zerop (lr_arg)
5192 && rcode == NE_EXPR && integer_zerop (rr_arg)
5193 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5194 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5196 result = build2 (NE_EXPR, truth_type,
5197 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5199 build_int_cst (TREE_TYPE (ll_arg), 0));
5200 goto fold_truthop_exit;
5203 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5204 if (code == TRUTH_AND_EXPR
5205 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5206 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5207 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5208 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5210 result = build2 (EQ_EXPR, truth_type,
5211 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5213 build_int_cst (TREE_TYPE (ll_arg), 0));
5214 goto fold_truthop_exit;
5217 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5219 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5221 result = build2 (code, truth_type, lhs, rhs);
5222 goto fold_truthop_exit;
5228 /* See if the comparisons can be merged. Then get all the parameters for
5231 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5232 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5236 ll_inner = decode_field_reference (loc, ll_arg,
5237 &ll_bitsize, &ll_bitpos, &ll_mode,
5238 &ll_unsignedp, &volatilep, &ll_mask,
5240 lr_inner = decode_field_reference (loc, lr_arg,
5241 &lr_bitsize, &lr_bitpos, &lr_mode,
5242 &lr_unsignedp, &volatilep, &lr_mask,
5244 rl_inner = decode_field_reference (loc, rl_arg,
5245 &rl_bitsize, &rl_bitpos, &rl_mode,
5246 &rl_unsignedp, &volatilep, &rl_mask,
5248 rr_inner = decode_field_reference (loc, rr_arg,
5249 &rr_bitsize, &rr_bitpos, &rr_mode,
5250 &rr_unsignedp, &volatilep, &rr_mask,
5253 /* The inner operation on the lhs of each
5254 comparison must be the same if we are to be able to do anything.
5255 Then see if we have constants. If not, the same must be true for
5257 if (volatilep || ll_inner == 0 || rl_inner == 0
5258 || ! operand_equal_p (ll_inner, rl_inner, 0))
5261 if (TREE_CODE (lr_arg) == INTEGER_CST
5262 && TREE_CODE (rr_arg) == INTEGER_CST)
5263 l_const = lr_arg, r_const = rr_arg;
5264 else if (lr_inner == 0 || rr_inner == 0
5265 || ! operand_equal_p (lr_inner, rr_inner, 0))
5268 l_const = r_const = 0;
5270 /* If either comparison code is not correct for our logical operation,
5271 fail. However, we can convert a one-bit comparison against zero into
5272 the opposite comparison against that bit being set in the field. */
5274 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5275 if (lcode != wanted_code)
5277 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5279 /* Make the left operand unsigned, since we are only interested
5280 in the value of one bit. Otherwise we are doing the wrong
5289 /* This is analogous to the code for l_const above. */
5290 if (rcode != wanted_code)
5292 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5301 /* See if we can find a mode that contains both fields being compared on
5302 the left. If we can't, fail. Otherwise, update all constants and masks
5303 to be relative to a field of that size. */
5304 first_bit = MIN (ll_bitpos, rl_bitpos);
5305 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5306 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5307 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5309 if (lnmode == VOIDmode)
5312 lnbitsize = GET_MODE_BITSIZE (lnmode);
5313 lnbitpos = first_bit & ~ (lnbitsize - 1);
5314 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5315 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5317 if (BYTES_BIG_ENDIAN)
5319 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5320 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5323 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5324 size_int (xll_bitpos));
5325 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5326 size_int (xrl_bitpos));
5330 l_const = fold_convert_loc (loc, lntype, l_const);
5331 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5332 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5333 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5334 fold_build1_loc (loc, BIT_NOT_EXPR,
5337 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5339 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5344 r_const = fold_convert_loc (loc, lntype, r_const);
5345 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5346 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5347 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5348 fold_build1_loc (loc, BIT_NOT_EXPR,
5351 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5353 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5357 /* If the right sides are not constant, do the same for them. Also,
5358 disallow this optimization if a size or signedness mismatch occurs
5359 between the left and right sides. */
5362 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5363 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5364 /* Make sure the two fields on the right
5365 correspond to the left without being swapped. */
5366 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5369 first_bit = MIN (lr_bitpos, rr_bitpos);
5370 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5371 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5372 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5374 if (rnmode == VOIDmode)
5377 rnbitsize = GET_MODE_BITSIZE (rnmode);
5378 rnbitpos = first_bit & ~ (rnbitsize - 1);
5379 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5380 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5382 if (BYTES_BIG_ENDIAN)
5384 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5385 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5388 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5390 size_int (xlr_bitpos));
5391 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5393 size_int (xrr_bitpos));
5395 /* Make a mask that corresponds to both fields being compared.
5396 Do this for both items being compared. If the operands are the
5397 same size and the bits being compared are in the same position
5398 then we can do this by masking both and comparing the masked
5400 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5401 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5402 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5404 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5405 ll_unsignedp || rl_unsignedp);
5406 if (! all_ones_mask_p (ll_mask, lnbitsize))
5407 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5409 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5410 lr_unsignedp || rr_unsignedp);
5411 if (! all_ones_mask_p (lr_mask, rnbitsize))
5412 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5414 result = build2 (wanted_code, truth_type, lhs, rhs);
5415 goto fold_truthop_exit;
5418 /* There is still another way we can do something: If both pairs of
5419 fields being compared are adjacent, we may be able to make a wider
5420 field containing them both.
5422 Note that we still must mask the lhs/rhs expressions. Furthermore,
5423 the mask must be shifted to account for the shift done by
5424 make_bit_field_ref. */
5425 if ((ll_bitsize + ll_bitpos == rl_bitpos
5426 && lr_bitsize + lr_bitpos == rr_bitpos)
5427 || (ll_bitpos == rl_bitpos + rl_bitsize
5428 && lr_bitpos == rr_bitpos + rr_bitsize))
5432 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5433 ll_bitsize + rl_bitsize,
5434 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5435 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5436 lr_bitsize + rr_bitsize,
5437 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5439 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5440 size_int (MIN (xll_bitpos, xrl_bitpos)));
5441 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5442 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5444 /* Convert to the smaller type before masking out unwanted bits. */
5446 if (lntype != rntype)
5448 if (lnbitsize > rnbitsize)
5450 lhs = fold_convert_loc (loc, rntype, lhs);
5451 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5454 else if (lnbitsize < rnbitsize)
5456 rhs = fold_convert_loc (loc, lntype, rhs);
5457 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5462 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5463 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5465 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5466 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5468 result = build2 (wanted_code, truth_type, lhs, rhs);
5469 goto fold_truthop_exit;
5475 /* Handle the case of comparisons with constants. If there is something in
5476 common between the masks, those bits of the constants must be the same.
5477 If not, the condition is always false. Test for this to avoid generating
5478 incorrect code below. */
5479 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5480 if (! integer_zerop (result)
5481 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5482 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5484 if (wanted_code == NE_EXPR)
5486 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5487 return constant_boolean_node (true, truth_type);
5491 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5492 return constant_boolean_node (false, truth_type);
5496 /* Construct the expression we will return. First get the component
5497 reference we will make. Unless the mask is all ones the width of
5498 that field, perform the mask operation. Then compare with the
5500 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5501 ll_unsignedp || rl_unsignedp);
5503 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5504 if (! all_ones_mask_p (ll_mask, lnbitsize))
5506 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5507 SET_EXPR_LOCATION (result, loc);
5510 result = build2 (wanted_code, truth_type, result,
5511 const_binop (BIT_IOR_EXPR, l_const, r_const));
5514 SET_EXPR_LOCATION (result, loc);
5518 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5522 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5526 enum tree_code op_code;
5529 int consts_equal, consts_lt;
5532 STRIP_SIGN_NOPS (arg0);
5534 op_code = TREE_CODE (arg0);
5535 minmax_const = TREE_OPERAND (arg0, 1);
5536 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5537 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5538 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5539 inner = TREE_OPERAND (arg0, 0);
5541 /* If something does not permit us to optimize, return the original tree. */
5542 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5543 || TREE_CODE (comp_const) != INTEGER_CST
5544 || TREE_OVERFLOW (comp_const)
5545 || TREE_CODE (minmax_const) != INTEGER_CST
5546 || TREE_OVERFLOW (minmax_const))
5549 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5550 and GT_EXPR, doing the rest with recursive calls using logical
5554 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5557 = optimize_minmax_comparison (loc,
5558 invert_tree_comparison (code, false),
5561 return invert_truthvalue_loc (loc, tem);
5567 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5568 optimize_minmax_comparison
5569 (loc, EQ_EXPR, type, arg0, comp_const),
5570 optimize_minmax_comparison
5571 (loc, GT_EXPR, type, arg0, comp_const));
5574 if (op_code == MAX_EXPR && consts_equal)
5575 /* MAX (X, 0) == 0 -> X <= 0 */
5576 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5578 else if (op_code == MAX_EXPR && consts_lt)
5579 /* MAX (X, 0) == 5 -> X == 5 */
5580 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5582 else if (op_code == MAX_EXPR)
5583 /* MAX (X, 0) == -1 -> false */
5584 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5586 else if (consts_equal)
5587 /* MIN (X, 0) == 0 -> X >= 0 */
5588 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5591 /* MIN (X, 0) == 5 -> false */
5592 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5595 /* MIN (X, 0) == -1 -> X == -1 */
5596 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5599 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5600 /* MAX (X, 0) > 0 -> X > 0
5601 MAX (X, 0) > 5 -> X > 5 */
5602 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5604 else if (op_code == MAX_EXPR)
5605 /* MAX (X, 0) > -1 -> true */
5606 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5608 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5609 /* MIN (X, 0) > 0 -> false
5610 MIN (X, 0) > 5 -> false */
5611 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5614 /* MIN (X, 0) > -1 -> X > -1 */
5615 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5622 /* T is an integer expression that is being multiplied, divided, or taken a
5623 modulus (CODE says which and what kind of divide or modulus) by a
5624 constant C. See if we can eliminate that operation by folding it with
5625 other operations already in T. WIDE_TYPE, if non-null, is a type that
5626 should be used for the computation if wider than our type.
5628 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5629 (X * 2) + (Y * 4). We must, however, be assured that either the original
5630 expression would not overflow or that overflow is undefined for the type
5631 in the language in question.
5633 If we return a non-null expression, it is an equivalent form of the
5634 original computation, but need not be in the original type.
5636 We set *STRICT_OVERFLOW_P to true if the return value depends on
5637 signed overflow being undefined. Otherwise we do not change
5638 *STRICT_OVERFLOW_P. */
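/* Another instance of the same idea: when signed overflow is
   undefined (or both terms are known multiples of the divisor),

     (x * 4 + 8) / 4

   can be rewritten as

     x + 2

   because the division distributes exactly over both addends.  */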
5641 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5642 bool *strict_overflow_p)
5644 /* To avoid exponential search depth, refuse to allow recursion past
5645 three levels. Beyond that (1) it's highly unlikely that we'll find
5646 something interesting and (2) we've probably processed it before
5647 when we built the inner expression. */
5656 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5663 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5664 bool *strict_overflow_p)
5666 tree type = TREE_TYPE (t);
5667 enum tree_code tcode = TREE_CODE (t);
5668 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5669 > GET_MODE_SIZE (TYPE_MODE (type)))
5670 ? wide_type : type);
5672 int same_p = tcode == code;
5673 tree op0 = NULL_TREE, op1 = NULL_TREE;
5674 bool sub_strict_overflow_p;
5676 /* Don't deal with constants of zero here; they confuse the code below. */
5677 if (integer_zerop (c))
5680 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5681 op0 = TREE_OPERAND (t, 0);
5683 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5684 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5686 /* Note that we need not handle conditional operations here since fold
5687 already handles those cases. So just do arithmetic here. */
5691 /* For a constant, we can always simplify if we are a multiply
5692 or (for divide and modulus) if it is a multiple of our constant. */
5693 if (code == MULT_EXPR
5694 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5695 return const_binop (code, fold_convert (ctype, t),
5696 fold_convert (ctype, c));
5699 CASE_CONVERT: case NON_LVALUE_EXPR:
5700 /* If op0 is an expression ... */
5701 if ((COMPARISON_CLASS_P (op0)
5702 || UNARY_CLASS_P (op0)
5703 || BINARY_CLASS_P (op0)
5704 || VL_EXP_CLASS_P (op0)
5705 || EXPRESSION_CLASS_P (op0))
5706 /* ... and has wrapping overflow, and its type is smaller
5707 than ctype, then we cannot pass through as widening. */
5708 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5709 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5710 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5711 && (TYPE_PRECISION (ctype)
5712 > TYPE_PRECISION (TREE_TYPE (op0))))
5713 /* ... or this is a truncation (t is narrower than op0),
5714 then we cannot pass through this narrowing. */
5715 || (TYPE_PRECISION (type)
5716 < TYPE_PRECISION (TREE_TYPE (op0)))
5717 /* ... or signedness changes for division or modulus,
5718 then we cannot pass through this conversion. */
5719 || (code != MULT_EXPR
5720 && (TYPE_UNSIGNED (ctype)
5721 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5722 /* ... or has undefined overflow while the converted to
5723 type has not, we cannot do the operation in the inner type
5724 as that would introduce undefined overflow. */
5725 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5726 && !TYPE_OVERFLOW_UNDEFINED (type))))
5729 /* Pass the constant down and see if we can make a simplification. If
5730 we can, replace this expression with the inner simplification for
5731 possible later conversion to our or some other type. */
5732 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5733 && TREE_CODE (t2) == INTEGER_CST
5734 && !TREE_OVERFLOW (t2)
5735 && (0 != (t1 = extract_muldiv (op0, t2, code,
5737 ? ctype : NULL_TREE,
5738 strict_overflow_p))))
5743 /* If widening the type changes it from signed to unsigned, then we
5744 must avoid building ABS_EXPR itself as unsigned. */
5745 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5747 tree cstype = (*signed_type_for) (ctype);
5748 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5751 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5752 return fold_convert (ctype, t1);
5756 /* If the constant is negative, we cannot simplify this. */
5757 if (tree_int_cst_sgn (c) == -1)
5761 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5763 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5766 case MIN_EXPR: case MAX_EXPR:
5767 /* If widening the type changes the signedness, then we can't perform
5768 this optimization as that changes the result. */
5769 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5772 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5773 sub_strict_overflow_p = false;
5774 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5775 &sub_strict_overflow_p)) != 0
5776 && (t2 = extract_muldiv (op1, c, code, wide_type,
5777 &sub_strict_overflow_p)) != 0)
5779 if (tree_int_cst_sgn (c) < 0)
5780 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5781 if (sub_strict_overflow_p)
5782 *strict_overflow_p = true;
5783 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5784 fold_convert (ctype, t2));
5788 case LSHIFT_EXPR: case RSHIFT_EXPR:
5789 /* If the second operand is constant, this is a multiplication
5790 or floor division, by a power of two, so we can treat it that
5791 way unless the multiplier or divisor overflows. Signed
5792 left-shift overflow is implementation-defined rather than
5793 undefined in C90, so do not convert signed left shift into
5795 if (TREE_CODE (op1) == INTEGER_CST
5796 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5797 /* const_binop may not detect overflow correctly,
5798 so check for it explicitly here. */
5799 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5800 && TREE_INT_CST_HIGH (op1) == 0
5801 && 0 != (t1 = fold_convert (ctype,
5802 const_binop (LSHIFT_EXPR,
5805 && !TREE_OVERFLOW (t1))
5806 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5807 ? MULT_EXPR : FLOOR_DIV_EXPR,
5809 fold_convert (ctype, op0),
5811 c, code, wide_type, strict_overflow_p);
5814 case PLUS_EXPR: case MINUS_EXPR:
5815 /* See if we can eliminate the operation on both sides. If we can, we
5816 can return a new PLUS or MINUS. If we can't, the only remaining
5817 cases where we can do anything are if the second operand is a
5819 sub_strict_overflow_p = false;
5820 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5821 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5822 if (t1 != 0 && t2 != 0
5823 && (code == MULT_EXPR
5824 /* If not multiplication, we can only do this if both operands
5825 are divisible by c. */
5826 || (multiple_of_p (ctype, op0, c)
5827 && multiple_of_p (ctype, op1, c))))
5829 if (sub_strict_overflow_p)
5830 *strict_overflow_p = true;
5831 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5832 fold_convert (ctype, t2));
5835 /* If this was a subtraction, negate OP1 and set it to be an addition.
5836 This simplifies the logic below. */
5837 if (tcode == MINUS_EXPR)
5839 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5840 /* If OP1 was not easily negatable, the constant may be OP0. */
5841 if (TREE_CODE (op0) == INTEGER_CST)
5852 if (TREE_CODE (op1) != INTEGER_CST)
5855 /* If either OP1 or C is negative, this optimization is not safe for
5856 some of the division and remainder types while for others we need
5857 to change the code. */
5858 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5860 if (code == CEIL_DIV_EXPR)
5861 code = FLOOR_DIV_EXPR;
5862 else if (code == FLOOR_DIV_EXPR)
5863 code = CEIL_DIV_EXPR;
5864 else if (code != MULT_EXPR
5865 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5869 /* If it's a multiply or a division/modulus operation of a multiple
5870 of our constant, do the operation and verify it doesn't overflow. */
5871 if (code == MULT_EXPR
5872 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5874 op1 = const_binop (code, fold_convert (ctype, op1),
5875 fold_convert (ctype, c));
5876 /* We allow the constant to overflow with wrapping semantics. */
5878 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5884 /* If we have an unsigned type that is not a sizetype, we cannot widen
5885 the operation since it will change the result if the original
5886 computation overflowed. */
5887 if (TYPE_UNSIGNED (ctype)
5888 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5892 /* If we were able to eliminate our operation from the first side,
5893 apply our operation to the second side and reform the PLUS. */
5894 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5895 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5897 /* The last case is if we are a multiply. In that case, we can
5898 apply the distributive law to commute the multiply and addition
5899 if the multiplication of the constants doesn't overflow. */
5900 if (code == MULT_EXPR)
5901 return fold_build2 (tcode, ctype,
5902 fold_build2 (code, ctype,
5903 fold_convert (ctype, op0),
5904 fold_convert (ctype, c)),
5910 /* We have a special case here if we are doing something like
5911 (C * 8) % 4 since we know that's zero. */
5912 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5913 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5914 /* If the multiplication can overflow we cannot optimize this.
5915 ??? Until we can properly mark individual operations as
5916 not overflowing we need to treat sizetype special here as
5917 stor-layout relies on this optimization to make
5918 DECL_FIELD_BIT_OFFSET always a constant. */
5919 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5920 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5921 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5922 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5923 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5925 *strict_overflow_p = true;
5926 return omit_one_operand (type, integer_zero_node, op0);
5929 /* ... fall through ... */
5931 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5932 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5933 /* If we can extract our operation from the LHS, do so and return a
5934 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5935 do something only if the second operand is a constant. */
5937 && (t1 = extract_muldiv (op0, c, code, wide_type,
5938 strict_overflow_p)) != 0)
5939 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5940 fold_convert (ctype, op1));
5941 else if (tcode == MULT_EXPR && code == MULT_EXPR
5942 && (t1 = extract_muldiv (op1, c, code, wide_type,
5943 strict_overflow_p)) != 0)
5944 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5945 fold_convert (ctype, t1));
5946 else if (TREE_CODE (op1) != INTEGER_CST)
5949 /* If these are the same operation types, we can associate them
5950 assuming no overflow. */
5952 && 0 != (t1 = int_const_binop (MULT_EXPR,
5953 fold_convert (ctype, op1),
5954 fold_convert (ctype, c), 1))
5955 && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1),
5956 (TYPE_UNSIGNED (ctype)
5957 && tcode != MULT_EXPR) ? -1 : 1,
5958 TREE_OVERFLOW (t1)))
5959 && !TREE_OVERFLOW (t1))
5960 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5962 /* If these operations "cancel" each other, we have the main
5963 optimizations of this pass, which occur when either constant is a
5964 multiple of the other, in which case we replace this with either an
5965 operation of CODE or TCODE.
5967 If we have an unsigned type that is not a sizetype, we cannot do
5968 this since it will change the result if the original computation
5970 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5971 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5972 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5973 || (tcode == MULT_EXPR
5974 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5975 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5976 && code != MULT_EXPR)))
5978 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5980 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5981 *strict_overflow_p = true;
5982 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5983 fold_convert (ctype,
5984 const_binop (TRUNC_DIV_EXPR,
5987 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5989 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5990 *strict_overflow_p = true;
5991 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5992 fold_convert (ctype,
5993 const_binop (TRUNC_DIV_EXPR,
6006 /* Return a node which has the indicated constant VALUE (either 0 or
6007 1), and is of the indicated TYPE. */
6010 constant_boolean_node (int value, tree type)
6012 if (type == integer_type_node)
6013 return value ? integer_one_node : integer_zero_node;
6014 else if (type == boolean_type_node)
6015 return value ? boolean_true_node : boolean_false_node;
6017 return build_int_cst (type, value);
6021 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6022 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6023 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6024 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6025 COND is the first argument to CODE; otherwise (as in the example
6026 given here), it is the second argument. TYPE is the type of the
6027 original expression. Return NULL_TREE if no simplification is
6031 fold_binary_op_with_conditional_arg (location_t loc,
6032 enum tree_code code,
6033 tree type, tree op0, tree op1,
6034 tree cond, tree arg, int cond_first_p)
6036 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6037 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6038 tree test, true_value, false_value;
6039 tree lhs = NULL_TREE;
6040 tree rhs = NULL_TREE;
6042 if (TREE_CODE (cond) == COND_EXPR)
6044 test = TREE_OPERAND (cond, 0);
6045 true_value = TREE_OPERAND (cond, 1);
6046 false_value = TREE_OPERAND (cond, 2);
6047 /* If this operand is a throw expression (it has void type), then it does not make
6048 sense to try to perform a logical or arithmetic operation
6050 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6052 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6057 tree testtype = TREE_TYPE (cond);
6059 true_value = constant_boolean_node (true, testtype);
6060 false_value = constant_boolean_node (false, testtype);
6063 /* This transformation is only worthwhile if we don't have to wrap ARG
6064 in a SAVE_EXPR and the operation can be simplified on at least one
6065 of the branches once it is pushed inside the COND_EXPR. */
6066 if (!TREE_CONSTANT (arg)
6067 && (TREE_SIDE_EFFECTS (arg)
6068 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6071 arg = fold_convert_loc (loc, arg_type, arg);
6074 true_value = fold_convert_loc (loc, cond_type, true_value);
6076 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6078 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6082 false_value = fold_convert_loc (loc, cond_type, false_value);
6084 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6086 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6089 /* Check that we have simplified at least one of the branches. */
6090 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6093 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6097 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6099 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6100 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6101 ADDEND is the same as X.
6103 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6104 and finite. The problematic cases are when X is zero, and its mode
6105 has signed zeros. In the case of rounding towards -infinity,
6106 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6107 modes, X + 0 is not the same as X because -0 + 0 is 0. */
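/* Concretely, for IEEE doubles: if X is -0.0 then X + 0.0 evaluates
   to +0.0, so "X + 0.0 -> X" is not a valid fold, while "X - 0.0 -> X"
   is fine except when rounding towards -infinity, where 0.0 - 0.0
   yields -0.0.  */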
6110 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6112 if (!real_zerop (addend))
6115 /* Don't allow the fold with -fsignaling-nans. */
6116 if (HONOR_SNANS (TYPE_MODE (type)))
6119 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6120 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6123 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6124 if (TREE_CODE (addend) == REAL_CST
6125 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6128 /* The mode has signed zeros, and we have to honor their sign.
6129 In this situation, there is only one case we can return true for.
6130 X - 0 is the same as X unless rounding towards -infinity is
6132 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6135 /* Subroutine of fold() that checks comparisons of built-in math
6136 functions against real constants.
6138 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6139 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6140 is the type of the result and ARG0 and ARG1 are the operands of the
6141 comparison. ARG1 must be a TREE_REAL_CST.
6143 The function returns the constant folded tree if a simplification
6144 can be made, and NULL_TREE otherwise. */
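/* Two simple instances of the folds performed below:

     sqrt (x) >  2.0   becomes   x > 4.0
     sqrt (x) < -1.0   becomes   constant false

   The trickier cases (bounds whose square overflows to infinity,
   NaNs, and the need for save_expr) are handled one by one below.  */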
6147 fold_mathfn_compare (location_t loc,
6148 enum built_in_function fcode, enum tree_code code,
6149 tree type, tree arg0, tree arg1)
6153 if (BUILTIN_SQRT_P (fcode))
6155 tree arg = CALL_EXPR_ARG (arg0, 0);
6156 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6158 c = TREE_REAL_CST (arg1);
6159 if (REAL_VALUE_NEGATIVE (c))
6161 /* sqrt(x) ==, < or <= y is always false, if y is negative. */
6162 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6163 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6165 /* sqrt(x) > y is always true, if y is negative and we
6166 don't care about NaNs, i.e. negative values of x. */
6167 if (code == NE_EXPR || !HONOR_NANS (mode))
6168 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6170 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6171 return fold_build2_loc (loc, GE_EXPR, type, arg,
6172 build_real (TREE_TYPE (arg), dconst0));
6174 else if (code == GT_EXPR || code == GE_EXPR)
6178 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6179 real_convert (&c2, mode, &c2);
6181 if (REAL_VALUE_ISINF (c2))
6183 /* sqrt(x) > y is x == +Inf, when y is very large. */
6184 if (HONOR_INFINITIES (mode))
6185 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6186 build_real (TREE_TYPE (arg), c2));
6188 /* sqrt(x) > y is always false, when y is very large
6189 and we don't care about infinities. */
6190 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6193 /* sqrt(x) > c is the same as x > c*c. */
6194 return fold_build2_loc (loc, code, type, arg,
6195 build_real (TREE_TYPE (arg), c2));
6197 else if (code == LT_EXPR || code == LE_EXPR)
6201 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6202 real_convert (&c2, mode, &c2);
6204 if (REAL_VALUE_ISINF (c2))
6206 /* sqrt(x) < y is always true, when y is a very large
6207 value and we don't care about NaNs or Infinities. */
6208 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6209 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6211 /* sqrt(x) < y is x != +Inf when y is very large and we
6212 don't care about NaNs. */
6213 if (! HONOR_NANS (mode))
6214 return fold_build2_loc (loc, NE_EXPR, type, arg,
6215 build_real (TREE_TYPE (arg), c2));
6217 /* sqrt(x) < y is x >= 0 when y is very large and we
6218 don't care about Infinities. */
6219 if (! HONOR_INFINITIES (mode))
6220 return fold_build2_loc (loc, GE_EXPR, type, arg,
6221 build_real (TREE_TYPE (arg), dconst0));
6223 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6224 if (lang_hooks.decls.global_bindings_p () != 0
6225 || CONTAINS_PLACEHOLDER_P (arg))
6228 arg = save_expr (arg);
6229 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6230 fold_build2_loc (loc, GE_EXPR, type, arg,
6231 build_real (TREE_TYPE (arg),
6233 fold_build2_loc (loc, NE_EXPR, type, arg,
6234 build_real (TREE_TYPE (arg),
6238 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6239 if (! HONOR_NANS (mode))
6240 return fold_build2_loc (loc, code, type, arg,
6241 build_real (TREE_TYPE (arg), c2));
6243 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6244 if (lang_hooks.decls.global_bindings_p () == 0
6245 && ! CONTAINS_PLACEHOLDER_P (arg))
6247 arg = save_expr (arg);
6248 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6249 fold_build2_loc (loc, GE_EXPR, type, arg,
6250 build_real (TREE_TYPE (arg),
6252 fold_build2_loc (loc, code, type, arg,
6253 build_real (TREE_TYPE (arg),
6262 /* Subroutine of fold() that optimizes comparisons against Infinities,
6263 either +Inf or -Inf.
6265 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6266 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6267 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6269 The function returns the constant folded tree if a simplification
6270 can be made, and NULL_TREE otherwise. */
6273 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6274 tree arg0, tree arg1)
6276 enum machine_mode mode;
6277 REAL_VALUE_TYPE max;
6281 mode = TYPE_MODE (TREE_TYPE (arg0));
6283 /* For negative infinity swap the sense of the comparison. */
6284 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6286 code = swap_tree_comparison (code);
6291 /* x > +Inf is always false, if we ignore sNaNs. */
6292 if (HONOR_SNANS (mode))
6294 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6297 /* x <= +Inf is always true, if we don't care about NaNs. */
6298 if (! HONOR_NANS (mode))
6299 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6301 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6302 if (lang_hooks.decls.global_bindings_p () == 0
6303 && ! CONTAINS_PLACEHOLDER_P (arg0))
6305 arg0 = save_expr (arg0);
6306 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6312 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6313 real_maxval (&max, neg, mode);
6314 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6315 arg0, build_real (TREE_TYPE (arg0), max));
6318 /* x < +Inf is always equal to x <= DBL_MAX. */
6319 real_maxval (&max, neg, mode);
6320 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6321 arg0, build_real (TREE_TYPE (arg0), max));
6324 /* x != +Inf is always equal to !(x > DBL_MAX). */
6325 real_maxval (&max, neg, mode);
6326 if (! HONOR_NANS (mode))
6327 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6328 arg0, build_real (TREE_TYPE (arg0), max));
6330 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6331 arg0, build_real (TREE_TYPE (arg0), max));
6332 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
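/* Illustrative examples (editorial note, not part of the original sources),
   for double x compared against +Inf with the default math flags:

     x >  +Inf   ->  always false
     x <= +Inf   ->  x == x            (i.e. !isnan (x), using a SAVE_EXPR)
     x <  +Inf   ->  x <= DBL_MAX
     x >= +Inf   ->  x > DBL_MAX
     x != +Inf   ->  !(x > DBL_MAX)

   Comparisons against -Inf first swap the sense of the comparison.  */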
6341 /* Subroutine of fold() that optimizes comparisons of a division by
6342 a nonzero integer constant against an integer constant, i.e. X/C1 op C2.
6345 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6346 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6347 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6349 The function returns the constant folded tree if a simplification
6350 can be made, and NULL_TREE otherwise. */
6353 fold_div_compare (location_t loc,
6354 enum tree_code code, tree type, tree arg0, tree arg1)
6356 tree prod, tmp, hi, lo;
6357 tree arg00 = TREE_OPERAND (arg0, 0);
6358 tree arg01 = TREE_OPERAND (arg0, 1);
6360 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6364 /* We have to do this the hard way to detect unsigned overflow.
6365 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6366 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6367 TREE_INT_CST_HIGH (arg01),
6368 TREE_INT_CST_LOW (arg1),
6369 TREE_INT_CST_HIGH (arg1),
6370 &val.low, &val.high, unsigned_p);
6371 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6372 neg_overflow = false;
6376 tmp = int_const_binop (MINUS_EXPR, arg01,
6377 build_int_cst (TREE_TYPE (arg01), 1), 0);
6380 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6381 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6382 TREE_INT_CST_HIGH (prod),
6383 TREE_INT_CST_LOW (tmp),
6384 TREE_INT_CST_HIGH (tmp),
6385 &val.low, &val.high, unsigned_p);
6386 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6387 -1, overflow | TREE_OVERFLOW (prod));
6389 else if (tree_int_cst_sgn (arg01) >= 0)
6391 tmp = int_const_binop (MINUS_EXPR, arg01,
6392 build_int_cst (TREE_TYPE (arg01), 1), 0);
6393 switch (tree_int_cst_sgn (arg1))
6396 neg_overflow = true;
6397 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6402 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6407 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6417 /* A negative divisor reverses the relational operators. */
6418 code = swap_tree_comparison (code);
6420 tmp = int_const_binop (PLUS_EXPR, arg01,
6421 build_int_cst (TREE_TYPE (arg01), 1), 0);
6422 switch (tree_int_cst_sgn (arg1))
6425 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6430 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6435 neg_overflow = true;
6436 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6448 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6449 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6450 if (TREE_OVERFLOW (hi))
6451 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6452 if (TREE_OVERFLOW (lo))
6453 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6454 return build_range_check (loc, type, arg00, 1, lo, hi);
6457 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6458 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6459 if (TREE_OVERFLOW (hi))
6460 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6461 if (TREE_OVERFLOW (lo))
6462 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6463 return build_range_check (loc, type, arg00, 0, lo, hi);
6466 if (TREE_OVERFLOW (lo))
6468 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6469 return omit_one_operand_loc (loc, type, tmp, arg00);
6471 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6474 if (TREE_OVERFLOW (hi))
6476 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6477 return omit_one_operand_loc (loc, type, tmp, arg00);
6479 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6482 if (TREE_OVERFLOW (hi))
6484 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6485 return omit_one_operand_loc (loc, type, tmp, arg00);
6487 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6490 if (TREE_OVERFLOW (lo))
6492 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6493 return omit_one_operand_loc (loc, type, tmp, arg00);
6495 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
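/* Illustrative examples (editorial note, not part of the original sources),
   for int x and C's truncating division:

     x / 4 == 2    ->  8 <= x && x <= 11   (a single range check)
     x / 4 >  2    ->  x >= 12
     x / -4 == 2   ->  -11 <= x && x <= -8

   When a bound overflows the type, the test collapses to a constant or to
   a comparison against the remaining bound.  */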
6505 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6506 equality/inequality test, then return a simplified form of the test
6507 using a sign test. Otherwise return NULL. TYPE is the desired result type. */
6511 fold_single_bit_test_into_sign_test (location_t loc,
6512 enum tree_code code, tree arg0, tree arg1,
6515 /* If this is testing a single bit, we can optimize the test. */
6516 if ((code == NE_EXPR || code == EQ_EXPR)
6517 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6518 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6520 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6521 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6522 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6524 if (arg00 != NULL_TREE
6525 /* This is only a win if casting to a signed type is cheap,
6526 i.e. when arg00's type is not a partial mode. */
6527 && TYPE_PRECISION (TREE_TYPE (arg00))
6528 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6530 tree stype = signed_type_for (TREE_TYPE (arg00));
6531 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6533 fold_convert_loc (loc, stype, arg00),
6534 build_int_cst (stype, 0));
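/* Illustrative example (editorial note, not part of the original sources):
   for a 32-bit unsigned int x, 0x80000000 is exactly the sign bit of the
   corresponding signed type, so

     (x & 0x80000000) != 0   ->   (int) x < 0
     (x & 0x80000000) == 0   ->   (int) x >= 0  */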
6541 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6542 equality/inequality test, then return a simplified form of
6543 the test using shifts and logical operations. Otherwise return
6544 NULL. TYPE is the desired result type. */
6547 fold_single_bit_test (location_t loc, enum tree_code code,
6548 tree arg0, tree arg1, tree result_type)
6550 /* If this is testing a single bit, we can optimize the test. */
6551 if ((code == NE_EXPR || code == EQ_EXPR)
6552 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6553 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6555 tree inner = TREE_OPERAND (arg0, 0);
6556 tree type = TREE_TYPE (arg0);
6557 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6558 enum machine_mode operand_mode = TYPE_MODE (type);
6560 tree signed_type, unsigned_type, intermediate_type;
6563 /* First, see if we can fold the single bit test into a sign-bit test. */
6565 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6570 /* Otherwise we have (A & C) != 0 where C is a single bit,
6571 convert that into ((A >> C2) & 1), where C2 = log2(C).
6572 Similarly for (A & C) == 0. */
6574 /* If INNER is a right shift of a constant and it plus BITNUM does
6575 not overflow, adjust BITNUM and INNER. */
6576 if (TREE_CODE (inner) == RSHIFT_EXPR
6577 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6578 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6579 && bitnum < TYPE_PRECISION (type)
6580 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6581 bitnum - TYPE_PRECISION (type)))
6583 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6584 inner = TREE_OPERAND (inner, 0);
6587 /* If we are going to be able to omit the AND below, we must do our
6588 operations as unsigned. If we must use the AND, we have a choice.
6589 Normally unsigned is faster, but for some machines signed is. */
6590 #ifdef LOAD_EXTEND_OP
6591 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6592 && !flag_syntax_only) ? 0 : 1;
6597 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6598 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6599 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6600 inner = fold_convert_loc (loc, intermediate_type, inner);
6603 inner = build2 (RSHIFT_EXPR, intermediate_type,
6604 inner, size_int (bitnum));
6606 one = build_int_cst (intermediate_type, 1);
6608 if (code == EQ_EXPR)
6609 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6611 /* Put the AND last so it can combine with more things. */
6612 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6614 /* Make sure to return the proper type. */
6615 inner = fold_convert_loc (loc, result_type, inner);
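/* Illustrative example (editorial note, not part of the original sources):
   when the sign-bit form above does not apply, a test of bit 3 becomes

     (x & 8) != 0   ->   (x >> 3) & 1
     (x & 8) == 0   ->   ((x >> 3) ^ 1) & 1

   with the shift and mask performed in a signedness chosen to suit the
   target's load-extension behavior.  */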
6622 /* Check whether we are allowed to reorder operands arg0 and arg1,
6623 such that the evaluation of arg1 occurs before arg0. */
6626 reorder_operands_p (const_tree arg0, const_tree arg1)
6628 if (! flag_evaluation_order)
6630 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6632 return ! TREE_SIDE_EFFECTS (arg0)
6633 && ! TREE_SIDE_EFFECTS (arg1);
6636 /* Test whether it is preferable to swap two operands, ARG0 and
6637 ARG1, for example because ARG0 is an integer constant and ARG1
6638 isn't. If REORDER is true, only recommend swapping if we can
6639 evaluate the operands in reverse order. */
6642 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6644 STRIP_SIGN_NOPS (arg0);
6645 STRIP_SIGN_NOPS (arg1);
6647 if (TREE_CODE (arg1) == INTEGER_CST)
6649 if (TREE_CODE (arg0) == INTEGER_CST)
6652 if (TREE_CODE (arg1) == REAL_CST)
6654 if (TREE_CODE (arg0) == REAL_CST)
6657 if (TREE_CODE (arg1) == FIXED_CST)
6659 if (TREE_CODE (arg0) == FIXED_CST)
6662 if (TREE_CODE (arg1) == COMPLEX_CST)
6664 if (TREE_CODE (arg0) == COMPLEX_CST)
6667 if (TREE_CONSTANT (arg1))
6669 if (TREE_CONSTANT (arg0))
6672 if (optimize_function_for_size_p (cfun))
6675 if (reorder && flag_evaluation_order
6676 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6679 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6680 for commutative and comparison operators. Ensuring a canonical
6681 form allows the optimizers to find additional redundancies without
6682 having to explicitly check for both orderings. */
6683 if (TREE_CODE (arg0) == SSA_NAME
6684 && TREE_CODE (arg1) == SSA_NAME
6685 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6688 /* Put SSA_NAMEs last. */
6689 if (TREE_CODE (arg1) == SSA_NAME)
6691 if (TREE_CODE (arg0) == SSA_NAME)
6694 /* Put variables last. */
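/* Illustrative example (editorial note, not part of the original sources):
   callers use this predicate to canonicalize commutative operands, e.g.

     1 + x    becomes   x + 1
     5 < x    becomes   x > 5

   so that constants and otherwise "simple" operands end up second and
   later folds only need to recognize one ordering.  */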
6703 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6704 ARG0 is extended to a wider type. */
6707 fold_widened_comparison (location_t loc, enum tree_code code,
6708 tree type, tree arg0, tree arg1)
6710 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6712 tree shorter_type, outer_type;
6716 if (arg0_unw == arg0)
6718 shorter_type = TREE_TYPE (arg0_unw);
6720 #ifdef HAVE_canonicalize_funcptr_for_compare
6721 /* Disable this optimization if we're casting a function pointer
6722 type on targets that require function pointer canonicalization. */
6723 if (HAVE_canonicalize_funcptr_for_compare
6724 && TREE_CODE (shorter_type) == POINTER_TYPE
6725 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6729 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6732 arg1_unw = get_unwidened (arg1, NULL_TREE);
6734 /* If possible, express the comparison in the shorter mode. */
6735 if ((code == EQ_EXPR || code == NE_EXPR
6736 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6737 && (TREE_TYPE (arg1_unw) == shorter_type
6738 || ((TYPE_PRECISION (shorter_type)
6739 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6740 && (TYPE_UNSIGNED (shorter_type)
6741 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6742 || (TREE_CODE (arg1_unw) == INTEGER_CST
6743 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6744 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6745 && int_fits_type_p (arg1_unw, shorter_type))))
6746 return fold_build2_loc (loc, code, type, arg0_unw,
6747 fold_convert_loc (loc, shorter_type, arg1_unw));
6749 if (TREE_CODE (arg1_unw) != INTEGER_CST
6750 || TREE_CODE (shorter_type) != INTEGER_TYPE
6751 || !int_fits_type_p (arg1_unw, shorter_type))
6754 /* If we are comparing with an integer that does not fit into the range
6755 of the shorter type, the result is known. */
6756 outer_type = TREE_TYPE (arg1_unw);
6757 min = lower_bound_in_type (outer_type, shorter_type);
6758 max = upper_bound_in_type (outer_type, shorter_type);
6760 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6762 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6769 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6774 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6780 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6782 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6787 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6789 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
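/* Illustrative example (editorial note, not part of the original sources):
   with unsigned char c,

     (int) c == 300   ->  always false   (300 does not fit in unsigned char)
     (int) c <  300   ->  always true
     (int) c == 42    ->  c == 42        (compared in the narrower type)  */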
6798 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6799 ARG0 just the signedness is changed. */
6802 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6803 tree arg0, tree arg1)
6806 tree inner_type, outer_type;
6808 if (!CONVERT_EXPR_P (arg0))
6811 outer_type = TREE_TYPE (arg0);
6812 arg0_inner = TREE_OPERAND (arg0, 0);
6813 inner_type = TREE_TYPE (arg0_inner);
6815 #ifdef HAVE_canonicalize_funcptr_for_compare
6816 /* Disable this optimization if we're casting a function pointer
6817 type on targets that require function pointer canonicalization. */
6818 if (HAVE_canonicalize_funcptr_for_compare
6819 && TREE_CODE (inner_type) == POINTER_TYPE
6820 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6824 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6827 if (TREE_CODE (arg1) != INTEGER_CST
6828 && !(CONVERT_EXPR_P (arg1)
6829 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6832 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6833 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6838 if (TREE_CODE (arg1) == INTEGER_CST)
6839 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6840 0, TREE_OVERFLOW (arg1));
6842 arg1 = fold_convert_loc (loc, inner_type, arg1);
6844 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
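/* Illustrative example (editorial note, not part of the original sources):
   with int i,

     (unsigned int) i == 5u   ->   i == 5

   i.e. the comparison is redone in the original signed type when the cast
   changes only the signedness; ordered comparisons are left alone in that
   case because signedness then matters.  */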
6847 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6848 the step of the array. Reconstructs s and delta in the case of s *
6849 delta being an integer constant (and thus already folded). ADDR is
6850 the address. OP1 is the multiplicative expression. If the
6851 function succeeds, the new address expression is returned.
6852 Otherwise NULL_TREE is returned. LOC is the location of the
6853 resulting expression. */
6856 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6858 tree s, delta, step;
6859 tree ref = TREE_OPERAND (addr, 0), pref;
6864 /* Strip the nops that might be added when converting op1 to sizetype. */
6867 /* Canonicalize op1 into a possibly non-constant delta
6868 and an INTEGER_CST s. */
6869 if (TREE_CODE (op1) == MULT_EXPR)
6871 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6876 if (TREE_CODE (arg0) == INTEGER_CST)
6881 else if (TREE_CODE (arg1) == INTEGER_CST)
6889 else if (TREE_CODE (op1) == INTEGER_CST)
6896 /* Pretend the expression is delta * 1. */
6898 s = integer_one_node;
6901 for (;; ref = TREE_OPERAND (ref, 0))
6903 if (TREE_CODE (ref) == ARRAY_REF)
6907 /* Remember if this was a multi-dimensional array. */
6908 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6911 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6914 itype = TREE_TYPE (domain);
6916 step = array_ref_element_size (ref);
6917 if (TREE_CODE (step) != INTEGER_CST)
6922 if (! tree_int_cst_equal (step, s))
6927 /* Check whether delta is a multiple of step. */
6928 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6934 /* Only fold here if we can verify we do not overflow one
6935 dimension of a multi-dimensional array. */
6940 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6941 || !TYPE_MAX_VALUE (domain)
6942 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6945 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6946 fold_convert_loc (loc, itype,
6947 TREE_OPERAND (ref, 1)),
6948 fold_convert_loc (loc, itype, delta));
6950 || TREE_CODE (tmp) != INTEGER_CST
6951 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6960 if (!handled_component_p (ref))
6964 /* We found a suitable array reference. Copy everything up to it,
6965 and replace the index. */
6967 pref = TREE_OPERAND (addr, 0);
6968 ret = copy_node (pref);
6969 SET_EXPR_LOCATION (ret, loc);
6974 pref = TREE_OPERAND (pref, 0);
6975 TREE_OPERAND (pos, 0) = copy_node (pref);
6976 pos = TREE_OPERAND (pos, 0);
6979 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6980 fold_convert_loc (loc, itype,
6981 TREE_OPERAND (pos, 1)),
6982 fold_convert_loc (loc, itype, delta));
6984 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
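/* Illustrative example (editorial note, not part of the original sources):
   for int a[100] (element size 4),

     &a[i] p+ 4 * d   ->   &a[i + d]
     &a[i] p+ 8       ->   &a[i + 2]

   i.e. a multiple of the element size is folded back into the array index,
   subject to the overflow check above for multi-dimensional accesses.  */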
6988 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6989 means A >= Y && A != MAX, but in this case we know that
6990 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6993 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6995 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6997 if (TREE_CODE (bound) == LT_EXPR)
6998 a = TREE_OPERAND (bound, 0);
6999 else if (TREE_CODE (bound) == GT_EXPR)
7000 a = TREE_OPERAND (bound, 1);
7004 typea = TREE_TYPE (a);
7005 if (!INTEGRAL_TYPE_P (typea)
7006 && !POINTER_TYPE_P (typea))
7009 if (TREE_CODE (ineq) == LT_EXPR)
7011 a1 = TREE_OPERAND (ineq, 1);
7012 y = TREE_OPERAND (ineq, 0);
7014 else if (TREE_CODE (ineq) == GT_EXPR)
7016 a1 = TREE_OPERAND (ineq, 0);
7017 y = TREE_OPERAND (ineq, 1);
7022 if (TREE_TYPE (a1) != typea)
7025 if (POINTER_TYPE_P (typea))
7027 /* Convert the pointer types into integer before taking the difference. */
7028 tree ta = fold_convert_loc (loc, ssizetype, a);
7029 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7030 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7033 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7035 if (!diff || !integer_onep (diff))
7038 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7041 /* Fold a sum or difference of at least one multiplication.
7042 Returns the folded tree or NULL if no simplification could be made. */
7045 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7046 tree arg0, tree arg1)
7048 tree arg00, arg01, arg10, arg11;
7049 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7051 /* (A * C) +- (B * C) -> (A+-B) * C.
7052 (A * C) +- A -> A * (C+-1).
7053 We are most concerned about the case where C is a constant,
7054 but other combinations show up during loop reduction. Since
7055 it is not difficult, try all four possibilities. */
7057 if (TREE_CODE (arg0) == MULT_EXPR)
7059 arg00 = TREE_OPERAND (arg0, 0);
7060 arg01 = TREE_OPERAND (arg0, 1);
7062 else if (TREE_CODE (arg0) == INTEGER_CST)
7064 arg00 = build_one_cst (type);
7069 /* We cannot generate constant 1 for fract. */
7070 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7073 arg01 = build_one_cst (type);
7075 if (TREE_CODE (arg1) == MULT_EXPR)
7077 arg10 = TREE_OPERAND (arg1, 0);
7078 arg11 = TREE_OPERAND (arg1, 1);
7080 else if (TREE_CODE (arg1) == INTEGER_CST)
7082 arg10 = build_one_cst (type);
7083 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7084 the purpose of this canonicalization. */
7085 if (TREE_INT_CST_HIGH (arg1) == -1
7086 && negate_expr_p (arg1)
7087 && code == PLUS_EXPR)
7089 arg11 = negate_expr (arg1);
7097 /* We cannot generate constant 1 for fract. */
7098 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7101 arg11 = build_one_cst (type);
7105 if (operand_equal_p (arg01, arg11, 0))
7106 same = arg01, alt0 = arg00, alt1 = arg10;
7107 else if (operand_equal_p (arg00, arg10, 0))
7108 same = arg00, alt0 = arg01, alt1 = arg11;
7109 else if (operand_equal_p (arg00, arg11, 0))
7110 same = arg00, alt0 = arg01, alt1 = arg10;
7111 else if (operand_equal_p (arg01, arg10, 0))
7112 same = arg01, alt0 = arg00, alt1 = arg11;
7114 /* No identical multiplicands; see if we can find a common
7115 power-of-two factor in non-power-of-two multiplies. This
7116 can help in multi-dimensional array access. */
7117 else if (host_integerp (arg01, 0)
7118 && host_integerp (arg11, 0))
7120 HOST_WIDE_INT int01, int11, tmp;
7123 int01 = TREE_INT_CST_LOW (arg01);
7124 int11 = TREE_INT_CST_LOW (arg11);
7126 /* Move min of absolute values to int11. */
7127 if ((int01 >= 0 ? int01 : -int01)
7128 < (int11 >= 0 ? int11 : -int11))
7130 tmp = int01, int01 = int11, int11 = tmp;
7131 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7138 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7139 /* The remainder should not be a constant, otherwise we
7140 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7141 increase the number of multiplications. */
7142 && TREE_CODE (arg10) != INTEGER_CST)
7144 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7145 build_int_cst (TREE_TYPE (arg00),
7150 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7155 return fold_build2_loc (loc, MULT_EXPR, type,
7156 fold_build2_loc (loc, code, type,
7157 fold_convert_loc (loc, type, alt0),
7158 fold_convert_loc (loc, type, alt1)),
7159 fold_convert_loc (loc, type, same));
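/* Illustrative examples (editorial note, not part of the original sources):

     x * 3 + x * 5    ->  (3 + 5) * x          =  x * 8
     x * 7 + x        ->  (7 + 1) * x          (A is treated as A * 1)
     i * 12 + j * 4   ->  (i * 3 + j) * 4      (common power-of-two factor)  */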
7164 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7165 specified by EXPR into the buffer PTR of length LEN bytes.
7166 Return the number of bytes placed in the buffer, or zero upon failure. */
7170 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7172 tree type = TREE_TYPE (expr);
7173 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7174 int byte, offset, word, words;
7175 unsigned char value;
7177 if (total_bytes > len)
7179 words = total_bytes / UNITS_PER_WORD;
7181 for (byte = 0; byte < total_bytes; byte++)
7183 int bitpos = byte * BITS_PER_UNIT;
7184 if (bitpos < HOST_BITS_PER_WIDE_INT)
7185 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7187 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7188 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7190 if (total_bytes > UNITS_PER_WORD)
7192 word = byte / UNITS_PER_WORD;
7193 if (WORDS_BIG_ENDIAN)
7194 word = (words - 1) - word;
7195 offset = word * UNITS_PER_WORD;
7196 if (BYTES_BIG_ENDIAN)
7197 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7199 offset += byte % UNITS_PER_WORD;
7202 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7203 ptr[offset] = value;
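/* Illustrative example (editorial note, not part of the original sources):
   assuming a 32-bit int, encoding the INTEGER_CST 0x01020304 writes the
   bytes in target memory order and returns 4:

     04 03 02 01   on a little-endian target
     01 02 03 04   on a big-endian target  */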
7209 /* Subroutine of native_encode_expr. Encode the REAL_CST
7210 specified by EXPR into the buffer PTR of length LEN bytes.
7211 Return the number of bytes placed in the buffer, or zero upon failure. */
7215 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7217 tree type = TREE_TYPE (expr);
7218 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7219 int byte, offset, word, words, bitpos;
7220 unsigned char value;
7222 /* There are always 32 bits in each long, no matter the size of
7223 the host's long. We handle floating point representations with up to 192 bits. */
7227 if (total_bytes > len)
7229 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7231 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7233 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7234 bitpos += BITS_PER_UNIT)
7236 byte = (bitpos / BITS_PER_UNIT) & 3;
7237 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7239 if (UNITS_PER_WORD < 4)
7241 word = byte / UNITS_PER_WORD;
7242 if (WORDS_BIG_ENDIAN)
7243 word = (words - 1) - word;
7244 offset = word * UNITS_PER_WORD;
7245 if (BYTES_BIG_ENDIAN)
7246 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7248 offset += byte % UNITS_PER_WORD;
7251 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7252 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7257 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7258 specified by EXPR into the buffer PTR of length LEN bytes.
7259 Return the number of bytes placed in the buffer, or zero upon failure. */
7263 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7268 part = TREE_REALPART (expr);
7269 rsize = native_encode_expr (part, ptr, len);
7272 part = TREE_IMAGPART (expr);
7273 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7276 return rsize + isize;
7280 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7281 specified by EXPR into the buffer PTR of length LEN bytes.
7282 Return the number of bytes placed in the buffer, or zero upon failure. */
7286 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7288 int i, size, offset, count;
7289 tree itype, elem, elements;
7292 elements = TREE_VECTOR_CST_ELTS (expr);
7293 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7294 itype = TREE_TYPE (TREE_TYPE (expr));
7295 size = GET_MODE_SIZE (TYPE_MODE (itype));
7296 for (i = 0; i < count; i++)
7300 elem = TREE_VALUE (elements);
7301 elements = TREE_CHAIN (elements);
7308 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7313 if (offset + size > len)
7315 memset (ptr+offset, 0, size);
7323 /* Subroutine of native_encode_expr. Encode the STRING_CST
7324 specified by EXPR into the buffer PTR of length LEN bytes.
7325 Return the number of bytes placed in the buffer, or zero upon failure. */
7329 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7331 tree type = TREE_TYPE (expr);
7332 HOST_WIDE_INT total_bytes;
7334 if (TREE_CODE (type) != ARRAY_TYPE
7335 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7336 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7337 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7339 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7340 if (total_bytes > len)
7342 if (TREE_STRING_LENGTH (expr) < total_bytes)
7344 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7345 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7346 total_bytes - TREE_STRING_LENGTH (expr));
7349 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7354 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7355 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7356 buffer PTR of length LEN bytes. Return the number of bytes
7357 placed in the buffer, or zero upon failure. */
7360 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7362 switch (TREE_CODE (expr))
7365 return native_encode_int (expr, ptr, len);
7368 return native_encode_real (expr, ptr, len);
7371 return native_encode_complex (expr, ptr, len);
7374 return native_encode_vector (expr, ptr, len);
7377 return native_encode_string (expr, ptr, len);
7385 /* Subroutine of native_interpret_expr. Interpret the contents of
7386 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7387 If the buffer cannot be interpreted, return NULL_TREE. */
7390 native_interpret_int (tree type, const unsigned char *ptr, int len)
7392 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7393 int byte, offset, word, words;
7394 unsigned char value;
7397 if (total_bytes > len)
7399 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7402 result = double_int_zero;
7403 words = total_bytes / UNITS_PER_WORD;
7405 for (byte = 0; byte < total_bytes; byte++)
7407 int bitpos = byte * BITS_PER_UNIT;
7408 if (total_bytes > UNITS_PER_WORD)
7410 word = byte / UNITS_PER_WORD;
7411 if (WORDS_BIG_ENDIAN)
7412 word = (words - 1) - word;
7413 offset = word * UNITS_PER_WORD;
7414 if (BYTES_BIG_ENDIAN)
7415 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7417 offset += byte % UNITS_PER_WORD;
7420 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7421 value = ptr[offset];
7423 if (bitpos < HOST_BITS_PER_WIDE_INT)
7424 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7426 result.high |= (unsigned HOST_WIDE_INT) value
7427 << (bitpos - HOST_BITS_PER_WIDE_INT);
7430 return double_int_to_tree (type, result);
7434 /* Subroutine of native_interpret_expr. Interpret the contents of
7435 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7436 If the buffer cannot be interpreted, return NULL_TREE. */
7439 native_interpret_real (tree type, const unsigned char *ptr, int len)
7441 enum machine_mode mode = TYPE_MODE (type);
7442 int total_bytes = GET_MODE_SIZE (mode);
7443 int byte, offset, word, words, bitpos;
7444 unsigned char value;
7445 /* There are always 32 bits in each long, no matter the size of
7446 the host's long. We handle floating point representations with up to 192 bits. */
7451 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7452 if (total_bytes > len || total_bytes > 24)
7454 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7456 memset (tmp, 0, sizeof (tmp));
7457 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7458 bitpos += BITS_PER_UNIT)
7460 byte = (bitpos / BITS_PER_UNIT) & 3;
7461 if (UNITS_PER_WORD < 4)
7463 word = byte / UNITS_PER_WORD;
7464 if (WORDS_BIG_ENDIAN)
7465 word = (words - 1) - word;
7466 offset = word * UNITS_PER_WORD;
7467 if (BYTES_BIG_ENDIAN)
7468 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7470 offset += byte % UNITS_PER_WORD;
7473 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7474 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7476 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7479 real_from_target (&r, tmp, mode);
7480 return build_real (type, r);
7484 /* Subroutine of native_interpret_expr. Interpret the contents of
7485 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7486 If the buffer cannot be interpreted, return NULL_TREE. */
7489 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7491 tree etype, rpart, ipart;
7494 etype = TREE_TYPE (type);
7495 size = GET_MODE_SIZE (TYPE_MODE (etype));
7498 rpart = native_interpret_expr (etype, ptr, size);
7501 ipart = native_interpret_expr (etype, ptr+size, size);
7504 return build_complex (type, rpart, ipart);
7508 /* Subroutine of native_interpret_expr. Interpret the contents of
7509 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7510 If the buffer cannot be interpreted, return NULL_TREE. */
7513 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7515 tree etype, elem, elements;
7518 etype = TREE_TYPE (type);
7519 size = GET_MODE_SIZE (TYPE_MODE (etype));
7520 count = TYPE_VECTOR_SUBPARTS (type);
7521 if (size * count > len)
7524 elements = NULL_TREE;
7525 for (i = count - 1; i >= 0; i--)
7527 elem = native_interpret_expr (etype, ptr+(i*size), size);
7530 elements = tree_cons (NULL_TREE, elem, elements);
7532 return build_vector (type, elements);
7536 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7537 the buffer PTR of length LEN as a constant of type TYPE. For
7538 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7539 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7540 return NULL_TREE. */
7543 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7545 switch (TREE_CODE (type))
7550 return native_interpret_int (type, ptr, len);
7553 return native_interpret_real (type, ptr, len);
7556 return native_interpret_complex (type, ptr, len);
7559 return native_interpret_vector (type, ptr, len);
7567 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7568 TYPE at compile-time. If we're unable to perform the conversion
7569 return NULL_TREE. */
7572 fold_view_convert_expr (tree type, tree expr)
7574 /* We support up to 512-bit values (for V8DFmode). */
7575 unsigned char buffer[64];
7578 /* Check that the host and target are sane. */
7579 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7582 len = native_encode_expr (expr, buffer, sizeof (buffer));
7586 return native_interpret_expr (type, buffer, len);
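/* Illustrative example (editorial note, not part of the original sources):
   on a target where float is IEEE single precision and int is 32 bits,

     VIEW_CONVERT_EXPR<int>(1.0f)   ->   0x3f800000

   i.e. the REAL_CST is encoded into a byte buffer and the same bytes are
   reinterpreted as an INTEGER_CST.  */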
7589 /* Build an expression for the address of T. Folds away INDIRECT_REF
7590 to avoid confusing the gimplify process. */
7593 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7595 /* The size of the object is not relevant when talking about its address. */
7596 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7597 t = TREE_OPERAND (t, 0);
7599 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7600 if (TREE_CODE (t) == INDIRECT_REF
7601 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7603 t = TREE_OPERAND (t, 0);
7605 if (TREE_TYPE (t) != ptrtype)
7607 t = build1 (NOP_EXPR, ptrtype, t);
7608 SET_EXPR_LOCATION (t, loc);
7611 else if (TREE_CODE (t) == MEM_REF
7612 && integer_zerop (TREE_OPERAND (t, 1)))
7613 return TREE_OPERAND (t, 0);
7614 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7616 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7618 if (TREE_TYPE (t) != ptrtype)
7619 t = fold_convert_loc (loc, ptrtype, t);
7623 t = build1 (ADDR_EXPR, ptrtype, t);
7624 SET_EXPR_LOCATION (t, loc);
7630 /* Build an expression for the address of T. */
7633 build_fold_addr_expr_loc (location_t loc, tree t)
7635 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7637 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7640 /* Fold a unary expression of code CODE and type TYPE with operand
7641 OP0. Return the folded expression if folding is successful.
7642 Otherwise, return NULL_TREE. */
7645 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7649 enum tree_code_class kind = TREE_CODE_CLASS (code);
7651 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7652 && TREE_CODE_LENGTH (code) == 1);
7657 if (CONVERT_EXPR_CODE_P (code)
7658 || code == FLOAT_EXPR || code == ABS_EXPR)
7660 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
7662 STRIP_SIGN_NOPS (arg0);
7666 /* Strip any conversions that don't change the mode. This
7667 is safe for every expression, except for a comparison
7668 expression because its signedness is derived from its operands.
7671 Note that this is done as an internal manipulation within
7672 the constant folder, in order to find the simplest
7673 representation of the arguments so that their form can be
7674 studied. In any case, the appropriate type conversions
7675 should be put back in the tree that will get out of the constant folder. */
7681 if (TREE_CODE_CLASS (code) == tcc_unary)
7683 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7684 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7685 fold_build1_loc (loc, code, type,
7686 fold_convert_loc (loc, TREE_TYPE (op0),
7687 TREE_OPERAND (arg0, 1))));
7688 else if (TREE_CODE (arg0) == COND_EXPR)
7690 tree arg01 = TREE_OPERAND (arg0, 1);
7691 tree arg02 = TREE_OPERAND (arg0, 2);
7692 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7693 arg01 = fold_build1_loc (loc, code, type,
7694 fold_convert_loc (loc,
7695 TREE_TYPE (op0), arg01));
7696 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7697 arg02 = fold_build1_loc (loc, code, type,
7698 fold_convert_loc (loc,
7699 TREE_TYPE (op0), arg02));
7700 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7703 /* If this was a conversion, and all we did was to move it
7704 inside the COND_EXPR, bring it back out. But leave it if
7705 it is a conversion from integer to integer and the
7706 result precision is no wider than a word since such a
7707 conversion is cheap and may be optimized away by combine,
7708 while it couldn't if it were outside the COND_EXPR. Then return
7709 so we don't get into an infinite recursion loop taking the
7710 conversion out and then back in. */
7712 if ((CONVERT_EXPR_CODE_P (code)
7713 || code == NON_LVALUE_EXPR)
7714 && TREE_CODE (tem) == COND_EXPR
7715 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7716 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7717 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7718 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7719 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7720 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7721 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7723 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7724 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7725 || flag_syntax_only))
7727 tem = build1 (code, type,
7729 TREE_TYPE (TREE_OPERAND
7730 (TREE_OPERAND (tem, 1), 0)),
7731 TREE_OPERAND (tem, 0),
7732 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7733 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7734 SET_EXPR_LOCATION (tem, loc);
7738 else if (COMPARISON_CLASS_P (arg0))
7740 if (TREE_CODE (type) == BOOLEAN_TYPE)
7742 arg0 = copy_node (arg0);
7743 TREE_TYPE (arg0) = type;
7746 else if (TREE_CODE (type) != INTEGER_TYPE)
7747 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7748 fold_build1_loc (loc, code, type,
7750 fold_build1_loc (loc, code, type,
7751 integer_zero_node));
7758 /* Re-association barriers around constants and other re-association
7759 barriers can be removed. */
7760 if (CONSTANT_CLASS_P (op0)
7761 || TREE_CODE (op0) == PAREN_EXPR)
7762 return fold_convert_loc (loc, type, op0);
7767 case FIX_TRUNC_EXPR:
7768 if (TREE_TYPE (op0) == type)
7771 /* If we have (type) (a CMP b) and type is an integral type, return
7772 a new expression involving the new type. */
7773 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7774 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7775 TREE_OPERAND (op0, 1));
7777 /* Handle cases of two conversions in a row. */
7778 if (CONVERT_EXPR_P (op0))
7780 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7781 tree inter_type = TREE_TYPE (op0);
7782 int inside_int = INTEGRAL_TYPE_P (inside_type);
7783 int inside_ptr = POINTER_TYPE_P (inside_type);
7784 int inside_float = FLOAT_TYPE_P (inside_type);
7785 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7786 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7787 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7788 int inter_int = INTEGRAL_TYPE_P (inter_type);
7789 int inter_ptr = POINTER_TYPE_P (inter_type);
7790 int inter_float = FLOAT_TYPE_P (inter_type);
7791 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7792 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7793 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7794 int final_int = INTEGRAL_TYPE_P (type);
7795 int final_ptr = POINTER_TYPE_P (type);
7796 int final_float = FLOAT_TYPE_P (type);
7797 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7798 unsigned int final_prec = TYPE_PRECISION (type);
7799 int final_unsignedp = TYPE_UNSIGNED (type);
7801 /* In addition to the cases of two conversions in a row
7802 handled below, if we are converting something to its own
7803 type via an object of identical or wider precision, neither
7804 conversion is needed. */
7805 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7806 && (((inter_int || inter_ptr) && final_int)
7807 || (inter_float && final_float))
7808 && inter_prec >= final_prec)
7809 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7811 /* Likewise, if the intermediate and initial types are either both
7812 float or both integer, we don't need the middle conversion if the
7813 former is wider than the latter and doesn't change the signedness
7814 (for integers). Avoid this if the final type is a pointer since
7815 then we sometimes need the middle conversion. Likewise if the
7816 final type has a precision not equal to the size of its mode. */
7817 if (((inter_int && inside_int)
7818 || (inter_float && inside_float)
7819 || (inter_vec && inside_vec))
7820 && inter_prec >= inside_prec
7821 && (inter_float || inter_vec
7822 || inter_unsignedp == inside_unsignedp)
7823 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7824 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7826 && (! final_vec || inter_prec == inside_prec))
7827 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7829 /* If we have a sign-extension of a zero-extended value, we can
7830 replace that by a single zero-extension. */
7831 if (inside_int && inter_int && final_int
7832 && inside_prec < inter_prec && inter_prec < final_prec
7833 && inside_unsignedp && !inter_unsignedp)
7834 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7836 /* Two conversions in a row are not needed unless:
7837 - some conversion is floating-point (overstrict for now), or
7838 - some conversion is a vector (overstrict for now), or
7839 - the intermediate type is narrower than both initial and final types, or
7841 - the intermediate type and innermost type differ in signedness,
7842 and the outermost type is wider than the intermediate, or
7843 - the initial type is a pointer type and the precisions of the
7844 intermediate and final types differ, or
7845 - the final type is a pointer type and the precisions of the
7846 initial and intermediate types differ. */
7847 if (! inside_float && ! inter_float && ! final_float
7848 && ! inside_vec && ! inter_vec && ! final_vec
7849 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7850 && ! (inside_int && inter_int
7851 && inter_unsignedp != inside_unsignedp
7852 && inter_prec < final_prec)
7853 && ((inter_unsignedp && inter_prec > inside_prec)
7854 == (final_unsignedp && final_prec > inter_prec))
7855 && ! (inside_ptr && inter_prec != final_prec)
7856 && ! (final_ptr && inside_prec != inter_prec)
7857 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7858 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7859 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
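/* Illustrative example (editorial note, not part of the original sources):
   with signed char c,

     (short) (int) c    ->   (short) c

   because the intermediate int is at least as wide as both types and has
   the same signedness, whereas in (int) (short) (long) x the middle cast
   is kept, since dropping it would change which bits are truncated.  */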
7862 /* Handle (T *)&A.B.C for A being of type T and B and C
7863 living at offset zero. This occurs frequently in
7864 C++ upcasting and then accessing the base. */
7865 if (TREE_CODE (op0) == ADDR_EXPR
7866 && POINTER_TYPE_P (type)
7867 && handled_component_p (TREE_OPERAND (op0, 0)))
7869 HOST_WIDE_INT bitsize, bitpos;
7871 enum machine_mode mode;
7872 int unsignedp, volatilep;
7873 tree base = TREE_OPERAND (op0, 0);
7874 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7875 &mode, &unsignedp, &volatilep, false);
7876 /* If the reference was to a (constant) zero offset, we can use
7877 the address of the base if it has the same base type
7878 as the result type and the pointer type is unqualified. */
7879 if (! offset && bitpos == 0
7880 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7881 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7882 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7883 return fold_convert_loc (loc, type,
7884 build_fold_addr_expr_loc (loc, base));
7887 if (TREE_CODE (op0) == MODIFY_EXPR
7888 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7889 /* Detect assigning a bitfield. */
7890 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7892 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7894 /* Don't leave an assignment inside a conversion
7895 unless assigning a bitfield. */
7896 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7897 /* First do the assignment, then return converted constant. */
7898 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7899 TREE_NO_WARNING (tem) = 1;
7900 TREE_USED (tem) = 1;
7901 SET_EXPR_LOCATION (tem, loc);
7905 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7906 constant (if x has signed type, the sign bit cannot be set
7907 in c). This folds extension into the BIT_AND_EXPR.
7908 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7909 very likely don't have maximal range for their precision and this
7910 transformation effectively doesn't preserve non-maximal ranges. */
7911 if (TREE_CODE (type) == INTEGER_TYPE
7912 && TREE_CODE (op0) == BIT_AND_EXPR
7913 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7915 tree and_expr = op0;
7916 tree and0 = TREE_OPERAND (and_expr, 0);
7917 tree and1 = TREE_OPERAND (and_expr, 1);
7920 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7921 || (TYPE_PRECISION (type)
7922 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7924 else if (TYPE_PRECISION (TREE_TYPE (and1))
7925 <= HOST_BITS_PER_WIDE_INT
7926 && host_integerp (and1, 1))
7928 unsigned HOST_WIDE_INT cst;
7930 cst = tree_low_cst (and1, 1);
7931 cst &= (HOST_WIDE_INT) -1
7932 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7933 change = (cst == 0);
7934 #ifdef LOAD_EXTEND_OP
7936 && !flag_syntax_only
7937 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7940 tree uns = unsigned_type_for (TREE_TYPE (and0));
7941 and0 = fold_convert_loc (loc, uns, and0);
7942 and1 = fold_convert_loc (loc, uns, and1);
7948 tem = force_fit_type_double (type, tree_to_double_int (and1),
7949 0, TREE_OVERFLOW (and1));
7950 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7951 fold_convert_loc (loc, type, and0), tem);
7955 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7956 when one of the new casts will fold away. Conservatively we assume
7957 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7958 if (POINTER_TYPE_P (type)
7959 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7960 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7961 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7962 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7964 tree arg00 = TREE_OPERAND (arg0, 0);
7965 tree arg01 = TREE_OPERAND (arg0, 1);
7967 return fold_build2_loc (loc,
7968 TREE_CODE (arg0), type,
7969 fold_convert_loc (loc, type, arg00),
7970 fold_convert_loc (loc, sizetype, arg01));
7973 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7974 of the same precision, and X has an integer type not narrower than
7975 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7976 if (INTEGRAL_TYPE_P (type)
7977 && TREE_CODE (op0) == BIT_NOT_EXPR
7978 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7979 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7980 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7982 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7983 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7984 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7985 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7986 fold_convert_loc (loc, type, tem));
7989 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7990 type of X and Y (integer types only). */
7991 if (INTEGRAL_TYPE_P (type)
7992 && TREE_CODE (op0) == MULT_EXPR
7993 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7994 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7996 /* Be careful not to introduce new overflows. */
7998 if (TYPE_OVERFLOW_WRAPS (type))
8001 mult_type = unsigned_type_for (type);
8003 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8005 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8006 fold_convert_loc (loc, mult_type,
8007 TREE_OPERAND (op0, 0)),
8008 fold_convert_loc (loc, mult_type,
8009 TREE_OPERAND (op0, 1)));
8010 return fold_convert_loc (loc, type, tem);
8014 tem = fold_convert_const (code, type, op0);
8015 return tem ? tem : NULL_TREE;
8017 case ADDR_SPACE_CONVERT_EXPR:
8018 if (integer_zerop (arg0))
8019 return fold_convert_const (code, type, arg0);
8022 case FIXED_CONVERT_EXPR:
8023 tem = fold_convert_const (code, type, arg0);
8024 return tem ? tem : NULL_TREE;
8026 case VIEW_CONVERT_EXPR:
8027 if (TREE_TYPE (op0) == type)
8029 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8030 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8031 type, TREE_OPERAND (op0, 0));
8032 if (TREE_CODE (op0) == MEM_REF)
8033 return fold_build2_loc (loc, MEM_REF, type,
8034 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8036 /* For integral conversions with the same precision or pointer
8037 conversions use a NOP_EXPR instead. */
8038 if ((INTEGRAL_TYPE_P (type)
8039 || POINTER_TYPE_P (type))
8040 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8041 || POINTER_TYPE_P (TREE_TYPE (op0)))
8042 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8043 return fold_convert_loc (loc, type, op0);
8045 /* Strip inner integral conversions that do not change the precision. */
8046 if (CONVERT_EXPR_P (op0)
8047 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8048 || POINTER_TYPE_P (TREE_TYPE (op0)))
8049 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8050 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8051 && (TYPE_PRECISION (TREE_TYPE (op0))
8052 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8053 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8054 type, TREE_OPERAND (op0, 0));
8056 return fold_view_convert_expr (type, op0);
8059 tem = fold_negate_expr (loc, arg0);
8061 return fold_convert_loc (loc, type, tem);
8065 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8066 return fold_abs_const (arg0, type);
8067 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8068 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8069 /* Convert fabs((double)float) into (double)fabsf(float). */
8070 else if (TREE_CODE (arg0) == NOP_EXPR
8071 && TREE_CODE (type) == REAL_TYPE)
8073 tree targ0 = strip_float_extensions (arg0);
8075 return fold_convert_loc (loc, type,
8076 fold_build1_loc (loc, ABS_EXPR,
8080 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8081 else if (TREE_CODE (arg0) == ABS_EXPR)
8083 else if (tree_expr_nonnegative_p (arg0))
8086 /* Strip sign ops from argument. */
8087 if (TREE_CODE (type) == REAL_TYPE)
8089 tem = fold_strip_sign_ops (arg0);
8091 return fold_build1_loc (loc, ABS_EXPR, type,
8092 fold_convert_loc (loc, type, tem));
8097 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8098 return fold_convert_loc (loc, type, arg0);
8099 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8101 tree itype = TREE_TYPE (type);
8102 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8103 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8104 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8105 negate_expr (ipart));
8107 if (TREE_CODE (arg0) == COMPLEX_CST)
8109 tree itype = TREE_TYPE (type);
8110 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8111 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8112 return build_complex (type, rpart, negate_expr (ipart));
8114 if (TREE_CODE (arg0) == CONJ_EXPR)
8115 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8119 if (TREE_CODE (arg0) == INTEGER_CST)
8120 return fold_not_const (arg0, type);
8121 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8122 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8123 /* Convert ~ (-A) to A - 1. */
8124 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8125 return fold_build2_loc (loc, MINUS_EXPR, type,
8126 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8127 build_int_cst (type, 1));
8128 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8129 else if (INTEGRAL_TYPE_P (type)
8130 && ((TREE_CODE (arg0) == MINUS_EXPR
8131 && integer_onep (TREE_OPERAND (arg0, 1)))
8132 || (TREE_CODE (arg0) == PLUS_EXPR
8133 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8134 return fold_build1_loc (loc, NEGATE_EXPR, type,
8135 fold_convert_loc (loc, type,
8136 TREE_OPERAND (arg0, 0)));
8137 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8138 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8139 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8140 fold_convert_loc (loc, type,
8141 TREE_OPERAND (arg0, 0)))))
8142 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8143 fold_convert_loc (loc, type,
8144 TREE_OPERAND (arg0, 1)));
8145 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8146 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8147 fold_convert_loc (loc, type,
8148 TREE_OPERAND (arg0, 1)))))
8149 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8150 fold_convert_loc (loc, type,
8151 TREE_OPERAND (arg0, 0)), tem);
8152 /* Perform BIT_NOT_EXPR on each element individually. */
8153 else if (TREE_CODE (arg0) == VECTOR_CST)
8155 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8156 int count = TYPE_VECTOR_SUBPARTS (type), i;
8158 for (i = 0; i < count; i++)
8162 elem = TREE_VALUE (elements);
8163 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8164 if (elem == NULL_TREE)
8166 elements = TREE_CHAIN (elements);
8169 elem = build_int_cst (TREE_TYPE (type), -1);
8170 list = tree_cons (NULL_TREE, elem, list);
8173 return build_vector (type, nreverse (list));
8178 case TRUTH_NOT_EXPR:
8179 /* The argument to invert_truthvalue must have Boolean type. */
8180 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8181 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8183 /* Note that the operand of this must be an int
8184 and its values must be 0 or 1.
8185 ("true" is a fixed value perhaps depending on the language,
8186 but we don't handle values other than 1 correctly yet.) */
8187 tem = fold_truth_not_expr (loc, arg0);
8190 return fold_convert_loc (loc, type, tem);
8193 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8194 return fold_convert_loc (loc, type, arg0);
8195 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8196 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8197 TREE_OPERAND (arg0, 1));
8198 if (TREE_CODE (arg0) == COMPLEX_CST)
8199 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8200 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8202 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8203 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8204 fold_build1_loc (loc, REALPART_EXPR, itype,
8205 TREE_OPERAND (arg0, 0)),
8206 fold_build1_loc (loc, REALPART_EXPR, itype,
8207 TREE_OPERAND (arg0, 1)));
8208 return fold_convert_loc (loc, type, tem);
8210 if (TREE_CODE (arg0) == CONJ_EXPR)
8212 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8213 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8214 TREE_OPERAND (arg0, 0));
8215 return fold_convert_loc (loc, type, tem);
8217 if (TREE_CODE (arg0) == CALL_EXPR)
8219 tree fn = get_callee_fndecl (arg0);
8220 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8221 switch (DECL_FUNCTION_CODE (fn))
8223 CASE_FLT_FN (BUILT_IN_CEXPI):
8224 fn = mathfn_built_in (type, BUILT_IN_COS);
8226 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
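/* Illustrative sketch (added; uses the standard C99 spelling rather than
   the internal builtin): __builtin_cexpi (x) computes cos (x) + i*sin (x),
   so its real part is simply cos (x), which is the call built here.  */
#if 0
#include <complex.h>
static double
realpart_of_cexpi_example (double x)
{
  return creal (cexp (I * x));	/* same value as cos (x) */
}
#endif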
8236 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8237 return fold_convert_loc (loc, type, integer_zero_node);
8238 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8239 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8240 TREE_OPERAND (arg0, 0));
8241 if (TREE_CODE (arg0) == COMPLEX_CST)
8242 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8243 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8245 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8246 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8247 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8248 TREE_OPERAND (arg0, 0)),
8249 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8250 TREE_OPERAND (arg0, 1)));
8251 return fold_convert_loc (loc, type, tem);
8253 if (TREE_CODE (arg0) == CONJ_EXPR)
8255 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8256 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8257 return fold_convert_loc (loc, type, negate_expr (tem));
8259 if (TREE_CODE (arg0) == CALL_EXPR)
8261 tree fn = get_callee_fndecl (arg0);
8262 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8263 switch (DECL_FUNCTION_CODE (fn))
8265 CASE_FLT_FN (BUILT_IN_CEXPI):
8266 fn = mathfn_built_in (type, BUILT_IN_SIN);
8268 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8278 /* Fold *&X to X if X is an lvalue. */
8279 if (TREE_CODE (op0) == ADDR_EXPR)
8281 tree op00 = TREE_OPERAND (op0, 0);
8282 if ((TREE_CODE (op00) == VAR_DECL
8283 || TREE_CODE (op00) == PARM_DECL
8284 || TREE_CODE (op00) == RESULT_DECL)
8285 && !TREE_READONLY (op00))
8292 } /* switch (code) */
8296 /* If the operation was a conversion, do _not_ mark a resulting constant
8297 with TREE_OVERFLOW if the original constant was not. These conversions
8298 have implementation defined behavior and retaining the TREE_OVERFLOW
8299 flag here would confuse later passes such as VRP. */
8301 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8302 tree type, tree op0)
8304 tree res = fold_unary_loc (loc, code, type, op0);
8306 && TREE_CODE (res) == INTEGER_CST
8307 && TREE_CODE (op0) == INTEGER_CST
8308 && CONVERT_EXPR_CODE_P (code))
8309 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8314 /* Fold a binary expression of code CODE and type TYPE with operands
8315 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8316 Return the folded expression if folding is successful. Otherwise,
8317 return NULL_TREE. */
8320 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8322 enum tree_code compl_code;
8324 if (code == MIN_EXPR)
8325 compl_code = MAX_EXPR;
8326 else if (code == MAX_EXPR)
8327 compl_code = MIN_EXPR;
8331 /* MIN (MAX (a, b), b) == b. */
8332 if (TREE_CODE (op0) == compl_code
8333 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8334 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8336 /* MIN (MAX (b, a), b) == b. */
8337 if (TREE_CODE (op0) == compl_code
8338 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8339 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8340 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8342 /* MIN (a, MAX (a, b)) == a. */
8343 if (TREE_CODE (op1) == compl_code
8344 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8345 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8346 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8348 /* MIN (a, MAX (b, a)) == a. */
8349 if (TREE_CODE (op1) == compl_code
8350 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8351 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8352 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
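/* Illustrative sketch (added; EX_MIN/EX_MAX are hypothetical stand-ins for
   MIN_EXPR/MAX_EXPR): the absorption laws this helper relies on.  */
#if 0
#define EX_MIN(x, y) ((x) < (y) ? (x) : (y))
#define EX_MAX(x, y) ((x) > (y) ? (x) : (y))
static int
minmax_absorption_holds (int a, int b)
{
  /* MIN (MAX (a, b), b) == b and MAX (MIN (a, b), b) == b.  */
  return EX_MIN (EX_MAX (a, b), b) == b
	 && EX_MAX (EX_MIN (a, b), b) == b;
}
#endif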
8357 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8358 by changing CODE to reduce the magnitude of constants involved in
8359 ARG0 of the comparison.
8360 Returns a canonicalized comparison tree if a simplification was
8361 possible, otherwise returns NULL_TREE.
8362 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8363 valid if signed overflow is undefined. */
8366 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8367 tree arg0, tree arg1,
8368 bool *strict_overflow_p)
8370 enum tree_code code0 = TREE_CODE (arg0);
8371 tree t, cst0 = NULL_TREE;
8375 /* Match A +- CST code arg1 and CST code arg1. We can change the
8376 first form only if overflow is undefined. */
8377 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8378 /* In principle pointers also have undefined overflow behavior,
8379 but that causes problems elsewhere. */
8380 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8381 && (code0 == MINUS_EXPR
8382 || code0 == PLUS_EXPR)
8383 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8384 || code0 == INTEGER_CST))
8387 /* Identify the constant in arg0 and its sign. */
8388 if (code0 == INTEGER_CST)
8391 cst0 = TREE_OPERAND (arg0, 1);
8392 sgn0 = tree_int_cst_sgn (cst0);
8394 /* Overflowed constants and zero will cause problems. */
8395 if (integer_zerop (cst0)
8396 || TREE_OVERFLOW (cst0))
8399 /* See if we can reduce the magnitude of the constant in
8400 arg0 by changing the comparison code. */
8401 if (code0 == INTEGER_CST)
8403 /* CST <= arg1 -> CST-1 < arg1. */
8404 if (code == LE_EXPR && sgn0 == 1)
8406 /* -CST < arg1 -> -CST-1 <= arg1. */
8407 else if (code == LT_EXPR && sgn0 == -1)
8409 /* CST > arg1 -> CST-1 >= arg1. */
8410 else if (code == GT_EXPR && sgn0 == 1)
8412 /* -CST >= arg1 -> -CST-1 > arg1. */
8413 else if (code == GE_EXPR && sgn0 == -1)
8417 /* arg1 code' CST' might be more canonical. */
8422 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8424 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8426 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8427 else if (code == GT_EXPR
8428 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8430 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8431 else if (code == LE_EXPR
8432 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8434 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8435 else if (code == GE_EXPR
8436 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8440 *strict_overflow_p = true;
8443 /* Now build the constant reduced in magnitude. But not if that
8444 would produce one outside of its type's range. */
8445 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8447 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8448 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8450 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8451 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8452 /* We cannot swap the comparison here as that would cause us to
8453 endlessly recurse. */
8456 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8457 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8458 if (code0 != INTEGER_CST)
8459 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8461 /* If swapping might yield a more canonical form, do so. */
8463 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8465 return fold_build2_loc (loc, code, type, t, arg1);
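/* Illustrative sketch (added; hypothetical helper, plain int operands):
   two concrete instances of the canonicalization performed above.  */
#if 0
static int
canonicalized_compares_agree (int x, int y)
{
  /* 3 <= x becomes 2 < x unconditionally; x + 4 > y becomes x + 3 >= y
     only when signed overflow is undefined, hence *strict_overflow_p.  */
  return ((3 <= x) == (2 < x))
	 && ((x + 4 > y) == (x + 3 >= y));   /* assumes x + 4 does not overflow */
}
#endif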
8468 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8469 overflow further. Try to decrease the magnitude of constants involved
8470 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8471 and put sole constants at the second argument position.
8472 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8475 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8476 tree arg0, tree arg1)
8479 bool strict_overflow_p;
8480 const char * const warnmsg = G_("assuming signed overflow does not occur "
8481 "when reducing constant in comparison");
8483 /* Try canonicalization by simplifying arg0. */
8484 strict_overflow_p = false;
8485 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8486 &strict_overflow_p);
8489 if (strict_overflow_p)
8490 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8494 /* Try canonicalization by simplifying arg1 using the swapped comparison. */
8496 code = swap_tree_comparison (code);
8497 strict_overflow_p = false;
8498 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8499 &strict_overflow_p);
8500 if (t && strict_overflow_p)
8501 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8505 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8506 space. This is used to avoid issuing overflow warnings for
8507 expressions like &p->x which can not wrap. */
8510 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8512 unsigned HOST_WIDE_INT offset_low, total_low;
8513 HOST_WIDE_INT size, offset_high, total_high;
8515 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8521 if (offset == NULL_TREE)
8526 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8530 offset_low = TREE_INT_CST_LOW (offset);
8531 offset_high = TREE_INT_CST_HIGH (offset);
8534 if (add_double_with_sign (offset_low, offset_high,
8535 bitpos / BITS_PER_UNIT, 0,
8536 &total_low, &total_high,
8540 if (total_high != 0)
8543 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8547 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an array. */
8549 if (TREE_CODE (base) == ADDR_EXPR)
8551 HOST_WIDE_INT base_size;
8553 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8554 if (base_size > 0 && size < base_size)
8558 return total_low > (unsigned HOST_WIDE_INT) size;
8561 /* Subroutine of fold_binary. This routine performs all of the
8562 transformations that are common to the equality/inequality
8563 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8564 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8565 fold_binary should use fold_binary instead of calling this routine directly. Fold a comparison with
8566 tree code CODE and type TYPE with operands OP0 and OP1. Return
8567 the folded comparison or NULL_TREE. */
8570 fold_comparison (location_t loc, enum tree_code code, tree type,
8573 tree arg0, arg1, tem;
8578 STRIP_SIGN_NOPS (arg0);
8579 STRIP_SIGN_NOPS (arg1);
8581 tem = fold_relational_const (code, type, arg0, arg1);
8582 if (tem != NULL_TREE)
8585 /* If one arg is a real or integer constant, put it last. */
8586 if (tree_swap_operands_p (arg0, arg1, true))
8587 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8589 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8590 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8591 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8592 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8593 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8594 && (TREE_CODE (arg1) == INTEGER_CST
8595 && !TREE_OVERFLOW (arg1)))
8597 tree const1 = TREE_OPERAND (arg0, 1);
8599 tree variable = TREE_OPERAND (arg0, 0);
8602 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8604 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8605 TREE_TYPE (arg1), const2, const1);
8607 /* If the constant operation overflowed this can be
8608 simplified as a comparison against INT_MAX/INT_MIN. */
8609 if (TREE_CODE (lhs) == INTEGER_CST
8610 && TREE_OVERFLOW (lhs))
8612 int const1_sgn = tree_int_cst_sgn (const1);
8613 enum tree_code code2 = code;
8615 /* Get the sign of the constant on the lhs if the
8616 operation were VARIABLE + CONST1. */
8617 if (TREE_CODE (arg0) == MINUS_EXPR)
8618 const1_sgn = -const1_sgn;
8620 /* The sign of the constant determines if we overflowed
8621 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8622 Canonicalize to the INT_MIN overflow by swapping the comparison
8624 if (const1_sgn == -1)
8625 code2 = swap_tree_comparison (code);
8627 /* We now can look at the canonicalized case
8628 VARIABLE + 1 CODE2 INT_MIN
8629 and decide on the result. */
8630 if (code2 == LT_EXPR
8632 || code2 == EQ_EXPR)
8633 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8634 else if (code2 == NE_EXPR
8636 || code2 == GT_EXPR)
8637 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8640 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8641 && (TREE_CODE (lhs) != INTEGER_CST
8642 || !TREE_OVERFLOW (lhs)))
8644 fold_overflow_warning ("assuming signed overflow does not occur "
8645 "when changing X +- C1 cmp C2 to "
8647 WARN_STRICT_OVERFLOW_COMPARISON);
8648 return fold_build2_loc (loc, code, type, variable, lhs);
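/* Illustrative sketch (added; hypothetical helper): the X +- C1 CMP C2
   transform moves the constant to the other side of the comparison.  */
#if 0
static int
moved_constant_compare_example (int x)
{
  /* x + 5 < 20 is rewritten as x < 15; the two agree as long as x + 5 does
     not overflow, which is what the overflow warning above is about.  */
  return (x + 5 < 20) == (x < 15);
}
#endif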
8652 /* For comparisons of pointers we can decompose it to a compile time
8653 comparison of the base objects and the offsets into the object.
8654 This requires at least one operand being an ADDR_EXPR or a
8655 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8656 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8657 && (TREE_CODE (arg0) == ADDR_EXPR
8658 || TREE_CODE (arg1) == ADDR_EXPR
8659 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8660 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8662 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8663 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8664 enum machine_mode mode;
8665 int volatilep, unsignedp;
8666 bool indirect_base0 = false, indirect_base1 = false;
8668 /* Get base and offset for the access. Strip ADDR_EXPR for
8669 get_inner_reference, but put it back by stripping INDIRECT_REF
8670 off the base object if possible. indirect_baseN will be true
8671 if baseN is not an address but refers to the object itself. */
8673 if (TREE_CODE (arg0) == ADDR_EXPR)
8675 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8676 &bitsize, &bitpos0, &offset0, &mode,
8677 &unsignedp, &volatilep, false);
8678 if (TREE_CODE (base0) == INDIRECT_REF)
8679 base0 = TREE_OPERAND (base0, 0);
8681 indirect_base0 = true;
8683 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8685 base0 = TREE_OPERAND (arg0, 0);
8686 if (TREE_CODE (base0) == ADDR_EXPR)
8688 base0 = TREE_OPERAND (base0, 0);
8689 indirect_base0 = true;
8691 offset0 = TREE_OPERAND (arg0, 1);
8695 if (TREE_CODE (arg1) == ADDR_EXPR)
8697 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8698 &bitsize, &bitpos1, &offset1, &mode,
8699 &unsignedp, &volatilep, false);
8700 if (TREE_CODE (base1) == INDIRECT_REF)
8701 base1 = TREE_OPERAND (base1, 0);
8703 indirect_base1 = true;
8705 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8707 base1 = TREE_OPERAND (arg1, 0);
8708 if (TREE_CODE (base1) == ADDR_EXPR)
8710 base1 = TREE_OPERAND (base1, 0);
8711 indirect_base1 = true;
8713 offset1 = TREE_OPERAND (arg1, 1);
8716 /* A local variable can never be pointed to by
8717 the default SSA name of an incoming parameter. */
8718 if ((TREE_CODE (arg0) == ADDR_EXPR
8720 && TREE_CODE (base0) == VAR_DECL
8721 && auto_var_in_fn_p (base0, current_function_decl)
8723 && TREE_CODE (base1) == SSA_NAME
8724 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8725 && SSA_NAME_IS_DEFAULT_DEF (base1))
8726 || (TREE_CODE (arg1) == ADDR_EXPR
8728 && TREE_CODE (base1) == VAR_DECL
8729 && auto_var_in_fn_p (base1, current_function_decl)
8731 && TREE_CODE (base0) == SSA_NAME
8732 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8733 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8735 if (code == NE_EXPR)
8736 return constant_boolean_node (1, type);
8737 else if (code == EQ_EXPR)
8738 return constant_boolean_node (0, type);
8740 /* If we have equivalent bases we might be able to simplify. */
8741 else if (indirect_base0 == indirect_base1
8742 && operand_equal_p (base0, base1, 0))
8744 /* We can fold this expression to a constant if the non-constant
8745 offset parts are equal. */
8746 if ((offset0 == offset1
8747 || (offset0 && offset1
8748 && operand_equal_p (offset0, offset1, 0)))
8751 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8756 && bitpos0 != bitpos1
8757 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8758 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8759 fold_overflow_warning (("assuming pointer wraparound does not "
8760 "occur when comparing P +- C1 with "
8762 WARN_STRICT_OVERFLOW_CONDITIONAL);
8767 return constant_boolean_node (bitpos0 == bitpos1, type);
8769 return constant_boolean_node (bitpos0 != bitpos1, type);
8771 return constant_boolean_node (bitpos0 < bitpos1, type);
8773 return constant_boolean_node (bitpos0 <= bitpos1, type);
8775 return constant_boolean_node (bitpos0 >= bitpos1, type);
8777 return constant_boolean_node (bitpos0 > bitpos1, type);
8781 /* We can simplify the comparison to a comparison of the variable
8782 offset parts if the constant offset parts are equal.
8783 Be careful to use signed size type here because otherwise we
8784 mess with array offsets in the wrong way. This is possible
8785 because pointer arithmetic is restricted to remain within an
8786 object and overflow on pointer differences is undefined as of
8787 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8788 else if (bitpos0 == bitpos1
8789 && ((code == EQ_EXPR || code == NE_EXPR)
8790 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8792 /* By converting to signed size type we cover middle-end pointer
8793 arithmetic which operates on unsigned pointer types of size
8794 type size and ARRAY_REF offsets which are properly sign or
8795 zero extended from their type in case it is narrower than
8797 if (offset0 == NULL_TREE)
8798 offset0 = build_int_cst (ssizetype, 0);
8800 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8801 if (offset1 == NULL_TREE)
8802 offset1 = build_int_cst (ssizetype, 0);
8804 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8808 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8809 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8810 fold_overflow_warning (("assuming pointer wraparound does not "
8811 "occur when comparing P +- C1 with "
8813 WARN_STRICT_OVERFLOW_COMPARISON);
8815 return fold_build2_loc (loc, code, type, offset0, offset1);
8818 /* For non-equal bases we can simplify if they are addresses
8819 of local binding decls or constants. */
8820 else if (indirect_base0 && indirect_base1
8821 /* We know that !operand_equal_p (base0, base1, 0)
8822 because the if condition was false. But make
8823 sure two decls are not the same. */
8825 && TREE_CODE (arg0) == ADDR_EXPR
8826 && TREE_CODE (arg1) == ADDR_EXPR
8827 && (((TREE_CODE (base0) == VAR_DECL
8828 || TREE_CODE (base0) == PARM_DECL)
8829 && (targetm.binds_local_p (base0)
8830 || CONSTANT_CLASS_P (base1)))
8831 || CONSTANT_CLASS_P (base0))
8832 && (((TREE_CODE (base1) == VAR_DECL
8833 || TREE_CODE (base1) == PARM_DECL)
8834 && (targetm.binds_local_p (base1)
8835 || CONSTANT_CLASS_P (base0)))
8836 || CONSTANT_CLASS_P (base1)))
8838 if (code == EQ_EXPR)
8839 return omit_two_operands_loc (loc, type, boolean_false_node,
8841 else if (code == NE_EXPR)
8842 return omit_two_operands_loc (loc, type, boolean_true_node,
8845 /* For equal offsets we can simplify to a comparison of the base addresses. */
8847 else if (bitpos0 == bitpos1
8849 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8851 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8852 && ((offset0 == offset1)
8853 || (offset0 && offset1
8854 && operand_equal_p (offset0, offset1, 0))))
8857 base0 = build_fold_addr_expr_loc (loc, base0);
8859 base1 = build_fold_addr_expr_loc (loc, base1);
8860 return fold_build2_loc (loc, code, type, base0, base1);
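/* Illustrative sketch (added; hypothetical helper): when both addresses
   decompose to the same base, the comparison reduces to comparing the
   constant byte offsets and folds to a constant.  */
#if 0
static int
address_compare_example (void)
{
  static int a[8];
  return &a[2] < &a[5];	/* same base "a", byte offset 2*sizeof(int)
			   vs. 5*sizeof(int), so this folds to 1 */
}
#endif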
8864 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8865 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8866 the resulting offset is smaller in absolute value than the
8868 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8869 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8870 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8871 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8872 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8873 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8874 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8876 tree const1 = TREE_OPERAND (arg0, 1);
8877 tree const2 = TREE_OPERAND (arg1, 1);
8878 tree variable1 = TREE_OPERAND (arg0, 0);
8879 tree variable2 = TREE_OPERAND (arg1, 0);
8881 const char * const warnmsg = G_("assuming signed overflow does not "
8882 "occur when combining constants around "
8885 /* Put the constant on the side where it doesn't overflow and is
8886 of lower absolute value than before. */
8887 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8888 ? MINUS_EXPR : PLUS_EXPR,
8890 if (!TREE_OVERFLOW (cst)
8891 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8893 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8894 return fold_build2_loc (loc, code, type,
8896 fold_build2_loc (loc,
8897 TREE_CODE (arg1), TREE_TYPE (arg1),
8901 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8902 ? MINUS_EXPR : PLUS_EXPR,
8904 if (!TREE_OVERFLOW (cst)
8905 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8907 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8908 return fold_build2_loc (loc, code, type,
8909 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8915 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8916 signed arithmetic case. That form is created by the compiler
8917 often enough for folding it to be of value. One example is in
8918 computing loop trip counts after Operator Strength Reduction. */
8919 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8920 && TREE_CODE (arg0) == MULT_EXPR
8921 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8922 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8923 && integer_zerop (arg1))
8925 tree const1 = TREE_OPERAND (arg0, 1);
8926 tree const2 = arg1; /* zero */
8927 tree variable1 = TREE_OPERAND (arg0, 0);
8928 enum tree_code cmp_code = code;
8930 /* Handle unfolded multiplication by zero. */
8931 if (integer_zerop (const1))
8932 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8934 fold_overflow_warning (("assuming signed overflow does not occur when "
8935 "eliminating multiplication in comparison "
8937 WARN_STRICT_OVERFLOW_COMPARISON);
8939 /* If const1 is negative we swap the sense of the comparison. */
8940 if (tree_int_cst_sgn (const1) < 0)
8941 cmp_code = swap_tree_comparison (cmp_code);
8943 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
8946 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
8950 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8952 tree targ0 = strip_float_extensions (arg0);
8953 tree targ1 = strip_float_extensions (arg1);
8954 tree newtype = TREE_TYPE (targ0);
8956 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8957 newtype = TREE_TYPE (targ1);
8959 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8960 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8961 return fold_build2_loc (loc, code, type,
8962 fold_convert_loc (loc, newtype, targ0),
8963 fold_convert_loc (loc, newtype, targ1));
8965 /* (-a) CMP (-b) -> b CMP a */
8966 if (TREE_CODE (arg0) == NEGATE_EXPR
8967 && TREE_CODE (arg1) == NEGATE_EXPR)
8968 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8969 TREE_OPERAND (arg0, 0));
8971 if (TREE_CODE (arg1) == REAL_CST)
8973 REAL_VALUE_TYPE cst;
8974 cst = TREE_REAL_CST (arg1);
8976 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8977 if (TREE_CODE (arg0) == NEGATE_EXPR)
8978 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8979 TREE_OPERAND (arg0, 0),
8980 build_real (TREE_TYPE (arg1),
8981 real_value_negate (&cst)));
8983 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8984 /* a CMP (-0) -> a CMP 0 */
8985 if (REAL_VALUE_MINUS_ZERO (cst))
8986 return fold_build2_loc (loc, code, type, arg0,
8987 build_real (TREE_TYPE (arg1), dconst0));
8989 /* x != NaN is always true, other ops are always false. */
8990 if (REAL_VALUE_ISNAN (cst)
8991 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8993 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8994 return omit_one_operand_loc (loc, type, tem, arg0);
8997 /* Fold comparisons against infinity. */
8998 if (REAL_VALUE_ISINF (cst)
8999 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9001 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9002 if (tem != NULL_TREE)
9007 /* If this is a comparison of a real constant with a PLUS_EXPR
9008 or a MINUS_EXPR of a real constant, we can convert it into a
9009 comparison with a revised real constant as long as no overflow
9010 occurs when unsafe_math_optimizations are enabled. */
9011 if (flag_unsafe_math_optimizations
9012 && TREE_CODE (arg1) == REAL_CST
9013 && (TREE_CODE (arg0) == PLUS_EXPR
9014 || TREE_CODE (arg0) == MINUS_EXPR)
9015 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9016 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9017 ? MINUS_EXPR : PLUS_EXPR,
9018 arg1, TREE_OPERAND (arg0, 1)))
9019 && !TREE_OVERFLOW (tem))
9020 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9022 /* Likewise, we can simplify a comparison of a real constant with
9023 a MINUS_EXPR whose first operand is also a real constant, i.e.
9024 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9025 floating-point types only if -fassociative-math is set. */
9026 if (flag_associative_math
9027 && TREE_CODE (arg1) == REAL_CST
9028 && TREE_CODE (arg0) == MINUS_EXPR
9029 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9030 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9032 && !TREE_OVERFLOW (tem))
9033 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9034 TREE_OPERAND (arg0, 1), tem);
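/* Illustrative sketch (added; hypothetical helper): how the two transforms
   above rewrite comparisons against REAL_CSTs.  */
#if 0
static int
real_constant_compare_example (double x)
{
  /* x + 1.0 < 3.0 becomes x < 2.0, and (10.0 - x) < 4.0 becomes x > 6.0.
     The combined constant is rounded, so the forms can differ in the last
     ulp; that is why they are gated on -funsafe-math-optimizations and
     -fassociative-math.  */
  return (x + 1.0 < 3.0) == (x < 2.0);
}
#endif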
9036 /* Fold comparisons against built-in math functions. */
9037 if (TREE_CODE (arg1) == REAL_CST
9038 && flag_unsafe_math_optimizations
9039 && ! flag_errno_math)
9041 enum built_in_function fcode = builtin_mathfn_code (arg0);
9043 if (fcode != END_BUILTINS)
9045 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9046 if (tem != NULL_TREE)
9052 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9053 && CONVERT_EXPR_P (arg0))
9055 /* If we are widening one operand of an integer comparison,
9056 see if the other operand is similarly being widened. Perhaps we
9057 can do the comparison in the narrower type. */
9058 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9062 /* Or if we are changing signedness. */
9063 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9068 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9069 constant, we can simplify it. */
9070 if (TREE_CODE (arg1) == INTEGER_CST
9071 && (TREE_CODE (arg0) == MIN_EXPR
9072 || TREE_CODE (arg0) == MAX_EXPR)
9073 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9075 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9080 /* Simplify comparison of something with itself. (For IEEE
9081 floating-point, we can only do some of these simplifications.) */
9082 if (operand_equal_p (arg0, arg1, 0))
9087 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9088 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9089 return constant_boolean_node (1, type);
9094 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9095 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9096 return constant_boolean_node (1, type);
9097 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9100 /* For NE, we can only do this simplification if integer
9101 or we don't honor IEEE floating point NaNs. */
9102 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9103 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9105 /* ... fall through ... */
9108 return constant_boolean_node (0, type);
9114 /* If we are comparing an expression that just has comparisons
9115 of two integer values, arithmetic expressions of those comparisons,
9116 and constants, we can simplify it. There are only three cases
9117 to check: the two values can either be equal, the first can be
9118 greater, or the second can be greater. Fold the expression for
9119 those three values. Since each value must be 0 or 1, we have
9120 eight possibilities, each of which corresponds to the constant 0
9121 or 1 or one of the six possible comparisons.
9123 This handles common cases like (a > b) == 0 but also handles
9124 expressions like ((x > y) - (y > x)) > 0, which supposedly
9125 occur in macroized code. */
9127 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9129 tree cval1 = 0, cval2 = 0;
9132 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9133 /* Don't handle degenerate cases here; they should already
9134 have been handled anyway. */
9135 && cval1 != 0 && cval2 != 0
9136 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9137 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9138 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9139 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9140 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9141 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9142 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9144 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9145 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9147 /* We can't just pass T to eval_subst in case cval1 or cval2
9148 was the same as ARG1. */
9151 = fold_build2_loc (loc, code, type,
9152 eval_subst (loc, arg0, cval1, maxval,
9156 = fold_build2_loc (loc, code, type,
9157 eval_subst (loc, arg0, cval1, maxval,
9161 = fold_build2_loc (loc, code, type,
9162 eval_subst (loc, arg0, cval1, minval,
9166 /* All three of these results should be 0 or 1. Confirm they are.
9167 Then use those values to select the proper code to use. */
9169 if (TREE_CODE (high_result) == INTEGER_CST
9170 && TREE_CODE (equal_result) == INTEGER_CST
9171 && TREE_CODE (low_result) == INTEGER_CST)
9173 /* Make a 3-bit mask with the high-order bit being the
9174 value for `>', the next for `=', and the low for `<'. */
9175 switch ((integer_onep (high_result) * 4)
9176 + (integer_onep (equal_result) * 2)
9177 + integer_onep (low_result))
9181 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9202 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9207 tem = save_expr (build2 (code, type, cval1, cval2));
9208 SET_EXPR_LOCATION (tem, loc);
9211 return fold_build2_loc (loc, code, type, cval1, cval2);
9216 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9217 into a single range test. */
9218 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9219 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9220 && TREE_CODE (arg1) == INTEGER_CST
9221 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9222 && !integer_zerop (TREE_OPERAND (arg0, 1))
9223 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9224 && !TREE_OVERFLOW (arg1))
9226 tem = fold_div_compare (loc, code, type, arg0, arg1);
9227 if (tem != NULL_TREE)
9231 /* Fold ~X op ~Y as Y op X. */
9232 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9233 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9235 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9236 return fold_build2_loc (loc, code, type,
9237 fold_convert_loc (loc, cmp_type,
9238 TREE_OPERAND (arg1, 0)),
9239 TREE_OPERAND (arg0, 0));
9242 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9243 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9244 && TREE_CODE (arg1) == INTEGER_CST)
9246 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9247 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9248 TREE_OPERAND (arg0, 0),
9249 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9250 fold_convert_loc (loc, cmp_type, arg1)));
9257 /* Subroutine of fold_binary. Optimize complex multiplications of the
9258 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9259 argument EXPR represents the expression "z" of type TYPE. */
9262 fold_mult_zconjz (location_t loc, tree type, tree expr)
9264 tree itype = TREE_TYPE (type);
9265 tree rpart, ipart, tem;
9267 if (TREE_CODE (expr) == COMPLEX_EXPR)
9269 rpart = TREE_OPERAND (expr, 0);
9270 ipart = TREE_OPERAND (expr, 1);
9272 else if (TREE_CODE (expr) == COMPLEX_CST)
9274 rpart = TREE_REALPART (expr);
9275 ipart = TREE_IMAGPART (expr);
9279 expr = save_expr (expr);
9280 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9281 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9284 rpart = save_expr (rpart);
9285 ipart = save_expr (ipart);
9286 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9287 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9288 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9289 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9290 fold_convert_loc (loc, itype, integer_zero_node));
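/* Illustrative sketch (added; hypothetical helper): for z = a + b*i,
   z * conj (z) == a*a + b*b with a zero imaginary part, which is the
   COMPLEX_EXPR assembled above.  */
#if 0
#include <complex.h>
static double
zconjz_example (double _Complex z)
{
  double a = creal (z), b = cimag (z);
  return a * a + b * b;	/* real part of z * conj (z) */
}
#endif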
9294 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9295 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9296 guarantees that P and N have the same least significant log2(M) bits.
9297 N is not otherwise constrained. In particular, N is not normalized to
9298 0 <= N < M as is common. In general, the precise value of P is unknown.
9299 M is chosen as large as possible such that constant N can be determined.
9301 Returns M and sets *RESIDUE to N.
9303 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9304 account. This is not always possible due to PR 35705.
9307 static unsigned HOST_WIDE_INT
9308 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9309 bool allow_func_align)
9311 enum tree_code code;
9315 code = TREE_CODE (expr);
9316 if (code == ADDR_EXPR)
9318 expr = TREE_OPERAND (expr, 0);
9319 if (handled_component_p (expr))
9321 HOST_WIDE_INT bitsize, bitpos;
9323 enum machine_mode mode;
9324 int unsignedp, volatilep;
9326 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9327 &mode, &unsignedp, &volatilep, false);
9328 *residue = bitpos / BITS_PER_UNIT;
9331 if (TREE_CODE (offset) == INTEGER_CST)
9332 *residue += TREE_INT_CST_LOW (offset);
9334 /* We don't handle more complicated offset expressions. */
9340 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9341 return DECL_ALIGN_UNIT (expr);
9343 else if (code == POINTER_PLUS_EXPR)
9346 unsigned HOST_WIDE_INT modulus;
9347 enum tree_code inner_code;
9349 op0 = TREE_OPERAND (expr, 0);
9351 modulus = get_pointer_modulus_and_residue (op0, residue,
9354 op1 = TREE_OPERAND (expr, 1);
9356 inner_code = TREE_CODE (op1);
9357 if (inner_code == INTEGER_CST)
9359 *residue += TREE_INT_CST_LOW (op1);
9362 else if (inner_code == MULT_EXPR)
9364 op1 = TREE_OPERAND (op1, 1);
9365 if (TREE_CODE (op1) == INTEGER_CST)
9367 unsigned HOST_WIDE_INT align;
9369 /* Compute the greatest power-of-2 divisor of op1. */
9370 align = TREE_INT_CST_LOW (op1);
9373 /* If align is non-zero and less than *modulus, replace
9374 *modulus with align. If align is 0, then either op1 is 0
9375 or the greatest power-of-2 divisor of op1 doesn't fit in an
9376 unsigned HOST_WIDE_INT. In either case, no additional
9377 constraint is imposed. */
9379 modulus = MIN (modulus, align);
9386 /* If we get here, we were unable to determine anything useful about the expression. */
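/* Illustrative sketch (added; field offset assumes a typical ABI with a
   4-byte int):  */
#if 0
struct ex { char c; int i; } ex_var __attribute__ ((aligned (16)));
/* For &ex_var.i this function would return modulus 16 (the declared
   alignment) and set *residue to 4 (the byte offset of the field), i.e.
   the address is known to have the form 16*k + 4.  */
#endif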
9392 /* Fold a binary expression of code CODE and type TYPE with operands
9393 OP0 and OP1. LOC is the location of the resulting expression.
9394 Return the folded expression if folding is successful. Otherwise,
9395 return NULL_TREE. */
9398 fold_binary_loc (location_t loc,
9399 enum tree_code code, tree type, tree op0, tree op1)
9401 enum tree_code_class kind = TREE_CODE_CLASS (code);
9402 tree arg0, arg1, tem;
9403 tree t1 = NULL_TREE;
9404 bool strict_overflow_p;
9406 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9407 && TREE_CODE_LENGTH (code) == 2
9409 && op1 != NULL_TREE);
9414 /* Strip any conversions that don't change the mode. This is
9415 safe for every expression, except for a comparison expression
9416 because its signedness is derived from its operands. So, in
9417 the latter case, only strip conversions that don't change the
9418 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9421 Note that this is done as an internal manipulation within the
9422 constant folder, in order to find the simplest representation
9423 of the arguments so that their form can be studied. In any
9424 case, the appropriate type conversions should be put back in
9425 the tree that will get out of the constant folder. */
9427 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9429 STRIP_SIGN_NOPS (arg0);
9430 STRIP_SIGN_NOPS (arg1);
9438 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9439 constant but we can't do arithmetic on them. */
9440 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9441 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9442 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9443 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9444 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9445 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9447 if (kind == tcc_binary)
9449 /* Make sure type and arg0 have the same saturating flag. */
9450 gcc_assert (TYPE_SATURATING (type)
9451 == TYPE_SATURATING (TREE_TYPE (arg0)));
9452 tem = const_binop (code, arg0, arg1);
9454 else if (kind == tcc_comparison)
9455 tem = fold_relational_const (code, type, arg0, arg1);
9459 if (tem != NULL_TREE)
9461 if (TREE_TYPE (tem) != type)
9462 tem = fold_convert_loc (loc, type, tem);
9467 /* If this is a commutative operation, and ARG0 is a constant, move it
9468 to ARG1 to reduce the number of tests below. */
9469 if (commutative_tree_code (code)
9470 && tree_swap_operands_p (arg0, arg1, true))
9471 return fold_build2_loc (loc, code, type, op1, op0);
9473 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9475 First check for cases where an arithmetic operation is applied to a
9476 compound, conditional, or comparison operation. Push the arithmetic
9477 operation inside the compound or conditional to see if any folding
9478 can then be done. Convert comparison to conditional for this purpose.
9479 This also optimizes non-constant cases that used to be done in
9482 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9483 one of the operands is a comparison and the other is a comparison, a
9484 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9485 code below would make the expression more complex. Change it to a
9486 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9487 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9489 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9490 || code == EQ_EXPR || code == NE_EXPR)
9491 && ((truth_value_p (TREE_CODE (arg0))
9492 && (truth_value_p (TREE_CODE (arg1))
9493 || (TREE_CODE (arg1) == BIT_AND_EXPR
9494 && integer_onep (TREE_OPERAND (arg1, 1)))))
9495 || (truth_value_p (TREE_CODE (arg1))
9496 && (truth_value_p (TREE_CODE (arg0))
9497 || (TREE_CODE (arg0) == BIT_AND_EXPR
9498 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9500 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9501 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9504 fold_convert_loc (loc, boolean_type_node, arg0),
9505 fold_convert_loc (loc, boolean_type_node, arg1));
9507 if (code == EQ_EXPR)
9508 tem = invert_truthvalue_loc (loc, tem);
9510 return fold_convert_loc (loc, type, tem);
9513 if (TREE_CODE_CLASS (code) == tcc_binary
9514 || TREE_CODE_CLASS (code) == tcc_comparison)
9516 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9518 tem = fold_build2_loc (loc, code, type,
9519 fold_convert_loc (loc, TREE_TYPE (op0),
9520 TREE_OPERAND (arg0, 1)), op1);
9521 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
9522 goto fold_binary_exit;
9524 if (TREE_CODE (arg1) == COMPOUND_EXPR
9525 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9527 tem = fold_build2_loc (loc, code, type, op0,
9528 fold_convert_loc (loc, TREE_TYPE (op1),
9529 TREE_OPERAND (arg1, 1)));
9530 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
9531 goto fold_binary_exit;
9534 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9536 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9538 /*cond_first_p=*/1);
9539 if (tem != NULL_TREE)
9543 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9545 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9547 /*cond_first_p=*/0);
9548 if (tem != NULL_TREE)
9556 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9557 if (TREE_CODE (arg0) == ADDR_EXPR
9558 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9560 tree iref = TREE_OPERAND (arg0, 0);
9561 return fold_build2 (MEM_REF, type,
9562 TREE_OPERAND (iref, 0),
9563 int_const_binop (PLUS_EXPR, arg1,
9564 TREE_OPERAND (iref, 1), 0));
9567 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9568 if (TREE_CODE (arg0) == ADDR_EXPR
9569 && handled_component_p (TREE_OPERAND (arg0, 0)))
9572 HOST_WIDE_INT coffset;
9573 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9577 return fold_build2 (MEM_REF, type,
9578 build_fold_addr_expr (base),
9579 int_const_binop (PLUS_EXPR, arg1,
9580 size_int (coffset), 0));
9585 case POINTER_PLUS_EXPR:
9586 /* 0 +p index -> (type)index */
9587 if (integer_zerop (arg0))
9588 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9590 /* PTR +p 0 -> PTR */
9591 if (integer_zerop (arg1))
9592 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9594 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9595 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9596 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9597 return fold_convert_loc (loc, type,
9598 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9599 fold_convert_loc (loc, sizetype,
9601 fold_convert_loc (loc, sizetype,
9604 /* index +p PTR -> PTR +p index */
9605 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9606 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9607 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9608 fold_convert_loc (loc, type, arg1),
9609 fold_convert_loc (loc, sizetype, arg0));
9611 /* (PTR +p B) +p A -> PTR +p (B + A) */
9612 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9615 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9616 tree arg00 = TREE_OPERAND (arg0, 0);
9617 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9618 arg01, fold_convert_loc (loc, sizetype, arg1));
9619 return fold_convert_loc (loc, type,
9620 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9625 /* PTR_CST +p CST -> CST1 */
9626 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9627 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9628 fold_convert_loc (loc, type, arg1));
9630 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9631 of the array. The loop optimizer sometimes produces this type of expression. */
9633 if (TREE_CODE (arg0) == ADDR_EXPR)
9635 tem = try_move_mult_to_index (loc, arg0,
9636 fold_convert_loc (loc, sizetype, arg1));
9638 return fold_convert_loc (loc, type, tem);
9644 /* A + (-B) -> A - B */
9645 if (TREE_CODE (arg1) == NEGATE_EXPR)
9646 return fold_build2_loc (loc, MINUS_EXPR, type,
9647 fold_convert_loc (loc, type, arg0),
9648 fold_convert_loc (loc, type,
9649 TREE_OPERAND (arg1, 0)));
9650 /* (-A) + B -> B - A */
9651 if (TREE_CODE (arg0) == NEGATE_EXPR
9652 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9653 return fold_build2_loc (loc, MINUS_EXPR, type,
9654 fold_convert_loc (loc, type, arg1),
9655 fold_convert_loc (loc, type,
9656 TREE_OPERAND (arg0, 0)));
9658 if (INTEGRAL_TYPE_P (type))
9660 /* Convert ~A + 1 to -A. */
9661 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9662 && integer_onep (arg1))
9663 return fold_build1_loc (loc, NEGATE_EXPR, type,
9664 fold_convert_loc (loc, type,
9665 TREE_OPERAND (arg0, 0)));
9668 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9669 && !TYPE_OVERFLOW_TRAPS (type))
9671 tree tem = TREE_OPERAND (arg0, 0);
9674 if (operand_equal_p (tem, arg1, 0))
9676 t1 = build_int_cst_type (type, -1);
9677 return omit_one_operand_loc (loc, type, t1, arg1);
9682 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9683 && !TYPE_OVERFLOW_TRAPS (type))
9685 tree tem = TREE_OPERAND (arg1, 0);
9688 if (operand_equal_p (arg0, tem, 0))
9690 t1 = build_int_cst_type (type, -1);
9691 return omit_one_operand_loc (loc, type, t1, arg0);
9695 /* X + (X / CST) * -CST is X % CST. */
9696 if (TREE_CODE (arg1) == MULT_EXPR
9697 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9698 && operand_equal_p (arg0,
9699 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9701 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9702 tree cst1 = TREE_OPERAND (arg1, 1);
9703 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9705 if (sum && integer_zerop (sum))
9706 return fold_convert_loc (loc, type,
9707 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9708 TREE_TYPE (arg0), arg0,
9713 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9714 same or one. Make sure type is not saturating.
9715 fold_plusminus_mult_expr will re-associate. */
9716 if ((TREE_CODE (arg0) == MULT_EXPR
9717 || TREE_CODE (arg1) == MULT_EXPR)
9718 && !TYPE_SATURATING (type)
9719 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9721 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9726 if (! FLOAT_TYPE_P (type))
9728 if (integer_zerop (arg1))
9729 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9731 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9732 with a constant, and the two constants have no bits in common,
9733 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications. */
9735 if (TREE_CODE (arg0) == BIT_AND_EXPR
9736 && TREE_CODE (arg1) == BIT_AND_EXPR
9737 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9738 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9739 && integer_zerop (const_binop (BIT_AND_EXPR,
9740 TREE_OPERAND (arg0, 1),
9741 TREE_OPERAND (arg1, 1))))
9743 code = BIT_IOR_EXPR;
9747 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9748 (plus (plus (mult) (mult)) (foo)) so that we can
9749 take advantage of the factoring cases below. */
9750 if (((TREE_CODE (arg0) == PLUS_EXPR
9751 || TREE_CODE (arg0) == MINUS_EXPR)
9752 && TREE_CODE (arg1) == MULT_EXPR)
9753 || ((TREE_CODE (arg1) == PLUS_EXPR
9754 || TREE_CODE (arg1) == MINUS_EXPR)
9755 && TREE_CODE (arg0) == MULT_EXPR))
9757 tree parg0, parg1, parg, marg;
9758 enum tree_code pcode;
9760 if (TREE_CODE (arg1) == MULT_EXPR)
9761 parg = arg0, marg = arg1;
9763 parg = arg1, marg = arg0;
9764 pcode = TREE_CODE (parg);
9765 parg0 = TREE_OPERAND (parg, 0);
9766 parg1 = TREE_OPERAND (parg, 1);
9770 if (TREE_CODE (parg0) == MULT_EXPR
9771 && TREE_CODE (parg1) != MULT_EXPR)
9772 return fold_build2_loc (loc, pcode, type,
9773 fold_build2_loc (loc, PLUS_EXPR, type,
9774 fold_convert_loc (loc, type,
9776 fold_convert_loc (loc, type,
9778 fold_convert_loc (loc, type, parg1));
9779 if (TREE_CODE (parg0) != MULT_EXPR
9780 && TREE_CODE (parg1) == MULT_EXPR)
9782 fold_build2_loc (loc, PLUS_EXPR, type,
9783 fold_convert_loc (loc, type, parg0),
9784 fold_build2_loc (loc, pcode, type,
9785 fold_convert_loc (loc, type, marg),
9786 fold_convert_loc (loc, type,
9792 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9793 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9794 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9796 /* Likewise if the operands are reversed. */
9797 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9798 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9800 /* Convert X + -C into X - C. */
9801 if (TREE_CODE (arg1) == REAL_CST
9802 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9804 tem = fold_negate_const (arg1, type);
9805 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9806 return fold_build2_loc (loc, MINUS_EXPR, type,
9807 fold_convert_loc (loc, type, arg0),
9808 fold_convert_loc (loc, type, tem));
9811 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9812 to __complex__ ( x, y ). This is not the same for SNaNs or
9813 if signed zeros are involved. */
9814 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9815 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9816 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9818 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9819 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9820 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9821 bool arg0rz = false, arg0iz = false;
9822 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9823 || (arg0i && (arg0iz = real_zerop (arg0i))))
9825 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9826 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9827 if (arg0rz && arg1i && real_zerop (arg1i))
9829 tree rp = arg1r ? arg1r
9830 : build1 (REALPART_EXPR, rtype, arg1);
9831 tree ip = arg0i ? arg0i
9832 : build1 (IMAGPART_EXPR, rtype, arg0);
9833 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9835 else if (arg0iz && arg1r && real_zerop (arg1r))
9837 tree rp = arg0r ? arg0r
9838 : build1 (REALPART_EXPR, rtype, arg0);
9839 tree ip = arg1i ? arg1i
9840 : build1 (IMAGPART_EXPR, rtype, arg1);
9841 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9846 if (flag_unsafe_math_optimizations
9847 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9848 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9849 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9852 /* Convert x+x into x*2.0. */
9853 if (operand_equal_p (arg0, arg1, 0)
9854 && SCALAR_FLOAT_TYPE_P (type))
9855 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9856 build_real (type, dconst2));
9858 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9859 We associate floats only if the user has specified
9860 -fassociative-math. */
9861 if (flag_associative_math
9862 && TREE_CODE (arg1) == PLUS_EXPR
9863 && TREE_CODE (arg0) != MULT_EXPR)
9865 tree tree10 = TREE_OPERAND (arg1, 0);
9866 tree tree11 = TREE_OPERAND (arg1, 1);
9867 if (TREE_CODE (tree11) == MULT_EXPR
9868 && TREE_CODE (tree10) == MULT_EXPR)
9871 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9872 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9875 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9876 We associate floats only if the user has specified
9877 -fassociative-math. */
9878 if (flag_associative_math
9879 && TREE_CODE (arg0) == PLUS_EXPR
9880 && TREE_CODE (arg1) != MULT_EXPR)
9882 tree tree00 = TREE_OPERAND (arg0, 0);
9883 tree tree01 = TREE_OPERAND (arg0, 1);
9884 if (TREE_CODE (tree01) == MULT_EXPR
9885 && TREE_CODE (tree00) == MULT_EXPR)
9888 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9889 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9895 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9896 is a rotate of A by C1 bits. */
9897 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9898 is a rotate of A by B bits. */
9900 enum tree_code code0, code1;
9902 code0 = TREE_CODE (arg0);
9903 code1 = TREE_CODE (arg1);
9904 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9905 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9906 && operand_equal_p (TREE_OPERAND (arg0, 0),
9907 TREE_OPERAND (arg1, 0), 0)
9908 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9909 TYPE_UNSIGNED (rtype))
9910 /* Only create rotates in complete modes. Other cases are not
9911 expanded properly. */
9912 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9914 tree tree01, tree11;
9915 enum tree_code code01, code11;
9917 tree01 = TREE_OPERAND (arg0, 1);
9918 tree11 = TREE_OPERAND (arg1, 1);
9919 STRIP_NOPS (tree01);
9920 STRIP_NOPS (tree11);
9921 code01 = TREE_CODE (tree01);
9922 code11 = TREE_CODE (tree11);
9923 if (code01 == INTEGER_CST
9924 && code11 == INTEGER_CST
9925 && TREE_INT_CST_HIGH (tree01) == 0
9926 && TREE_INT_CST_HIGH (tree11) == 0
9927 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9928 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9930 tem = build2 (LROTATE_EXPR,
9931 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9932 TREE_OPERAND (arg0, 0),
9933 code0 == LSHIFT_EXPR
9935 SET_EXPR_LOCATION (tem, loc);
9936 return fold_convert_loc (loc, type, tem);
9938 else if (code11 == MINUS_EXPR)
9940 tree tree110, tree111;
9941 tree110 = TREE_OPERAND (tree11, 0);
9942 tree111 = TREE_OPERAND (tree11, 1);
9943 STRIP_NOPS (tree110);
9944 STRIP_NOPS (tree111);
9945 if (TREE_CODE (tree110) == INTEGER_CST
9946 && 0 == compare_tree_int (tree110,
9948 (TREE_TYPE (TREE_OPERAND
9950 && operand_equal_p (tree01, tree111, 0))
9952 fold_convert_loc (loc, type,
9953 build2 ((code0 == LSHIFT_EXPR
9956 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9957 TREE_OPERAND (arg0, 0), tree01));
9959 else if (code01 == MINUS_EXPR)
9961 tree tree010, tree011;
9962 tree010 = TREE_OPERAND (tree01, 0);
9963 tree011 = TREE_OPERAND (tree01, 1);
9964 STRIP_NOPS (tree010);
9965 STRIP_NOPS (tree011);
9966 if (TREE_CODE (tree010) == INTEGER_CST
9967 && 0 == compare_tree_int (tree010,
9969 (TREE_TYPE (TREE_OPERAND
9971 && operand_equal_p (tree11, tree011, 0))
9972 return fold_convert_loc
9974 build2 ((code0 != LSHIFT_EXPR
9977 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9978 TREE_OPERAND (arg0, 0), tree11));
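/* Illustrative sketch (added; assumes a 32-bit unsigned int and a
   hypothetical helper name): the shape recognized above and folded into a
   single LROTATE_EXPR.  */
#if 0
static unsigned int
rotate_left_example (unsigned int a, unsigned int b)
{
  /* For 0 < b < 32 this is the (A << B) + (A >> (Z - B)) pattern with
     Z == 32, i.e. a left rotate of A by B bits.  */
  return (a << b) + (a >> (32 - b));
}
#endif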
9984 /* In most languages, we can't associate operations on floats through
9985 parentheses. Rather than remember where the parentheses were, we
9986 don't associate floats at all, unless the user has specified
9988 And, we need to make sure type is not saturating. */
9990 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9991 && !TYPE_SATURATING (type))
9993 tree var0, con0, lit0, minus_lit0;
9994 tree var1, con1, lit1, minus_lit1;
9997 /* Split both trees into variables, constants, and literals. Then
9998 associate each group together, the constants with literals,
9999 then the result with variables. This increases the chances of
10000 literals being recombined later and of generating relocatable
10001 expressions for the sum of a constant and literal. */
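/* Illustrative sketch (added; hypothetical helper): e.g. (x + 3) + (y + 5)
   is split into the variables {x, y} and the literals {3, 5} and
   reassembled as (x + y) + 8, so the literals fold into one constant.  */
#if 0
static int
reassociation_example (int x, int y)
{
  return ((x + 3) + (y + 5)) == ((x + y) + 8);	/* holds absent overflow */
}
#endif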
10002 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10003 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10004 code == MINUS_EXPR);
10006 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10007 if (code == MINUS_EXPR)
10010 /* With undefined overflow we can only associate constants with one
10011 variable, and constants whose association doesn't overflow. */
10012 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10013 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10020 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10021 tmp0 = TREE_OPERAND (tmp0, 0);
10022 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10023 tmp1 = TREE_OPERAND (tmp1, 0);
10024 /* The only case we can still associate with two variables
10025 is if they are the same, modulo negation. */
10026 if (!operand_equal_p (tmp0, tmp1, 0))
10030 if (ok && lit0 && lit1)
10032 tree tmp0 = fold_convert (type, lit0);
10033 tree tmp1 = fold_convert (type, lit1);
10035 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10036 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10041 /* Only do something if we found more than two objects. Otherwise,
10042 nothing has changed and we risk infinite recursion. */
10044 && (2 < ((var0 != 0) + (var1 != 0)
10045 + (con0 != 0) + (con1 != 0)
10046 + (lit0 != 0) + (lit1 != 0)
10047 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10049 var0 = associate_trees (loc, var0, var1, code, type);
10050 con0 = associate_trees (loc, con0, con1, code, type);
10051 lit0 = associate_trees (loc, lit0, lit1, code, type);
10052 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10054 /* Preserve the MINUS_EXPR if the negative part of the literal is
10055 greater than the positive part. Otherwise, the multiplicative
10056 folding code (i.e. extract_muldiv) may be fooled when
10057 unsigned constants are subtracted, as in the following
10058 example: ((X*2 + 4) - 8U)/2. */
10059 if (minus_lit0 && lit0)
10061 if (TREE_CODE (lit0) == INTEGER_CST
10062 && TREE_CODE (minus_lit0) == INTEGER_CST
10063 && tree_int_cst_lt (lit0, minus_lit0))
10065 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10071 lit0 = associate_trees (loc, lit0, minus_lit0,
10080 fold_convert_loc (loc, type,
10081 associate_trees (loc, var0, minus_lit0,
10082 MINUS_EXPR, type));
10085 con0 = associate_trees (loc, con0, minus_lit0,
10088 fold_convert_loc (loc, type,
10089 associate_trees (loc, var0, con0,
10094 con0 = associate_trees (loc, con0, lit0, code, type);
10096 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10104 /* Pointer simplifications for subtraction, simple reassociations. */
10105 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10107 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10108 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10109 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10111 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10112 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10113 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10114 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10115 return fold_build2_loc (loc, PLUS_EXPR, type,
10116 fold_build2_loc (loc, MINUS_EXPR, type,
10118 fold_build2_loc (loc, MINUS_EXPR, type,
10121 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10122 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10124 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10125 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10126 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10127 fold_convert_loc (loc, type, arg1));
10129 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
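/* Source-level sketch, assuming hypothetical char pointers p and q:
   (p + 4) - q is rewritten as (p - q) + 4 when p - q simplifies, and
   (p + 4) - (q + 12) as (p - q) + (4 - 12).  */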
10132 /* A - (-B) -> A + B */
10133 if (TREE_CODE (arg1) == NEGATE_EXPR)
10134 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10135 fold_convert_loc (loc, type,
10136 TREE_OPERAND (arg1, 0)));
10137 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10138 if (TREE_CODE (arg0) == NEGATE_EXPR
10139 && (FLOAT_TYPE_P (type)
10140 || INTEGRAL_TYPE_P (type))
10141 && negate_expr_p (arg1)
10142 && reorder_operands_p (arg0, arg1))
10143 return fold_build2_loc (loc, MINUS_EXPR, type,
10144 fold_convert_loc (loc, type,
10145 negate_expr (arg1)),
10146 fold_convert_loc (loc, type,
10147 TREE_OPERAND (arg0, 0)));
10148 /* Convert -A - 1 to ~A. */
10149 if (INTEGRAL_TYPE_P (type)
10150 && TREE_CODE (arg0) == NEGATE_EXPR
10151 && integer_onep (arg1)
10152 && !TYPE_OVERFLOW_TRAPS (type))
10153 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10154 fold_convert_loc (loc, type,
10155 TREE_OPERAND (arg0, 0)));
10157 /* Convert -1 - A to ~A. */
10158 if (INTEGRAL_TYPE_P (type)
10159 && integer_all_onesp (arg0))
10160 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10163 /* X - (X / CST) * CST is X % CST. */
10164 if (INTEGRAL_TYPE_P (type)
10165 && TREE_CODE (arg1) == MULT_EXPR
10166 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10167 && operand_equal_p (arg0,
10168 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10169 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10170 TREE_OPERAND (arg1, 1), 0))
10172 fold_convert_loc (loc, type,
10173 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10174 arg0, TREE_OPERAND (arg1, 1)));
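/* Worked example, assuming a hypothetical unsigned x:
   x - (x / 16) * 16 is exactly the truncating remainder, so it folds
   to x % 16.  */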
10176 if (! FLOAT_TYPE_P (type))
10178 if (integer_zerop (arg0))
10179 return negate_expr (fold_convert_loc (loc, type, arg1));
10180 if (integer_zerop (arg1))
10181 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10183 /* Fold A - (A & B) into ~B & A. */
10184 if (!TREE_SIDE_EFFECTS (arg0)
10185 && TREE_CODE (arg1) == BIT_AND_EXPR)
10187 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10189 tree arg10 = fold_convert_loc (loc, type,
10190 TREE_OPERAND (arg1, 0));
10191 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10192 fold_build1_loc (loc, BIT_NOT_EXPR,
10194 fold_convert_loc (loc, type, arg0));
10196 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10198 tree arg11 = fold_convert_loc (loc,
10199 type, TREE_OPERAND (arg1, 1));
10200 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10201 fold_build1_loc (loc, BIT_NOT_EXPR,
10203 fold_convert_loc (loc, type, arg0));
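/* Worked example, assuming hypothetical ints a and b: a & b only has
   bits that are already set in a, so a - (a & b) just clears those bits,
   which is ~b & a.  E.g. a = 13 (0b1101), b = 6 (0b0110):
   a - (a & b) = 13 - 4 = 9 = 0b1001 = ~b & a.  */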
10207 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10208 any power of 2 minus 1. */
10209 if (TREE_CODE (arg0) == BIT_AND_EXPR
10210 && TREE_CODE (arg1) == BIT_AND_EXPR
10211 && operand_equal_p (TREE_OPERAND (arg0, 0),
10212 TREE_OPERAND (arg1, 0), 0))
10214 tree mask0 = TREE_OPERAND (arg0, 1);
10215 tree mask1 = TREE_OPERAND (arg1, 1);
10216 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10218 if (operand_equal_p (tem, mask1, 0))
10220 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10221 TREE_OPERAND (arg0, 0), mask1);
10222 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10227 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10228 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10229 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10231 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10232 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10233 (-ARG1 + ARG0) reduces to -ARG1. */
10234 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10235 return negate_expr (fold_convert_loc (loc, type, arg1));
10237 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10238 __complex__ ( x, -y ). This is not the same for SNaNs or if
10239 signed zeros are involved. */
10240 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10241 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10242 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10244 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10245 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10246 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10247 bool arg0rz = false, arg0iz = false;
10248 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10249 || (arg0i && (arg0iz = real_zerop (arg0i))))
10251 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10252 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10253 if (arg0rz && arg1i && real_zerop (arg1i))
10255 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10257 : build1 (REALPART_EXPR, rtype, arg1));
10258 tree ip = arg0i ? arg0i
10259 : build1 (IMAGPART_EXPR, rtype, arg0);
10260 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10262 else if (arg0iz && arg1r && real_zerop (arg1r))
10264 tree rp = arg0r ? arg0r
10265 : build1 (REALPART_EXPR, rtype, arg0);
10266 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10268 : build1 (IMAGPART_EXPR, rtype, arg1));
10269 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10274 /* Fold &x - &x. This can happen from &x.foo - &x.
10275 This is unsafe for certain floats even in non-IEEE formats.
10276 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10277 Also note that operand_equal_p is always false if an operand is volatile. */
10280 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10281 && operand_equal_p (arg0, arg1, 0))
10282 return fold_convert_loc (loc, type, integer_zero_node);
10284 /* A - B -> A + (-B) if B is easily negatable. */
10285 if (negate_expr_p (arg1)
10286 && ((FLOAT_TYPE_P (type)
10287 /* Avoid this transformation if B is a positive REAL_CST. */
10288 && (TREE_CODE (arg1) != REAL_CST
10289 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10290 || INTEGRAL_TYPE_P (type)))
10291 return fold_build2_loc (loc, PLUS_EXPR, type,
10292 fold_convert_loc (loc, type, arg0),
10293 fold_convert_loc (loc, type,
10294 negate_expr (arg1)));
10296 /* Try folding difference of addresses. */
10298 HOST_WIDE_INT diff;
10300 if ((TREE_CODE (arg0) == ADDR_EXPR
10301 || TREE_CODE (arg1) == ADDR_EXPR)
10302 && ptr_difference_const (arg0, arg1, &diff))
10303 return build_int_cst_type (type, diff);
10306 /* Fold &a[i] - &a[j] to i-j. */
10307 if (TREE_CODE (arg0) == ADDR_EXPR
10308 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10309 && TREE_CODE (arg1) == ADDR_EXPR
10310 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10312 tree aref0 = TREE_OPERAND (arg0, 0);
10313 tree aref1 = TREE_OPERAND (arg1, 0);
10314 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10315 TREE_OPERAND (aref1, 0), 0))
10317 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10318 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10319 tree esz = array_ref_element_size (aref0);
10320 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10321 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10322 fold_convert_loc (loc, type, esz));
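/* Sketch, assuming a hypothetical int a[10]: the byte difference
   &a[7] - &a[2] folds to (7 - 2) * sizeof (int); the front end's
   subsequent division by the element size then yields the C-level
   result 5.  */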
10327 if (FLOAT_TYPE_P (type)
10328 && flag_unsafe_math_optimizations
10329 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10330 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10331 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10334 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10335 same or one. Make sure type is not saturating.
10336 fold_plusminus_mult_expr will re-associate. */
10337 if ((TREE_CODE (arg0) == MULT_EXPR
10338 || TREE_CODE (arg1) == MULT_EXPR)
10339 && !TYPE_SATURATING (type)
10340 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10342 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10350 /* (-A) * (-B) -> A * B */
10351 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10352 return fold_build2_loc (loc, MULT_EXPR, type,
10353 fold_convert_loc (loc, type,
10354 TREE_OPERAND (arg0, 0)),
10355 fold_convert_loc (loc, type,
10356 negate_expr (arg1)));
10357 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10358 return fold_build2_loc (loc, MULT_EXPR, type,
10359 fold_convert_loc (loc, type,
10360 negate_expr (arg0)),
10361 fold_convert_loc (loc, type,
10362 TREE_OPERAND (arg1, 0)));
10364 if (! FLOAT_TYPE_P (type))
10366 if (integer_zerop (arg1))
10367 return omit_one_operand_loc (loc, type, arg1, arg0);
10368 if (integer_onep (arg1))
10369 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10370 /* Transform x * -1 into -x. Make sure to do the negation
10371 on the original operand with conversions not stripped
10372 because we can only strip non-sign-changing conversions. */
10373 if (integer_all_onesp (arg1))
10374 return fold_convert_loc (loc, type, negate_expr (op0));
10375 /* Transform x * -C into -x * C if x is easily negatable. */
10376 if (TREE_CODE (arg1) == INTEGER_CST
10377 && tree_int_cst_sgn (arg1) == -1
10378 && negate_expr_p (arg0)
10379 && (tem = negate_expr (arg1)) != arg1
10380 && !TREE_OVERFLOW (tem))
10381 return fold_build2_loc (loc, MULT_EXPR, type,
10382 fold_convert_loc (loc, type,
10383 negate_expr (arg0)),
10386 /* (a * (1 << b)) is (a << b) */
10387 if (TREE_CODE (arg1) == LSHIFT_EXPR
10388 && integer_onep (TREE_OPERAND (arg1, 0)))
10389 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10390 TREE_OPERAND (arg1, 1));
10391 if (TREE_CODE (arg0) == LSHIFT_EXPR
10392 && integer_onep (TREE_OPERAND (arg0, 0)))
10393 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10394 TREE_OPERAND (arg0, 1));
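/* Worked example, assuming hypothetical unsigned a and b:
   a * (1 << b) multiplies by a power of two, so it folds to the cheaper
   shift a << b; the mirrored (1 << b) * a folds the same way.  */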
10396 /* (A + A) * C -> A * 2 * C */
10397 if (TREE_CODE (arg0) == PLUS_EXPR
10398 && TREE_CODE (arg1) == INTEGER_CST
10399 && operand_equal_p (TREE_OPERAND (arg0, 0),
10400 TREE_OPERAND (arg0, 1), 0))
10401 return fold_build2_loc (loc, MULT_EXPR, type,
10402 omit_one_operand_loc (loc, type,
10403 TREE_OPERAND (arg0, 0),
10404 TREE_OPERAND (arg0, 1)),
10405 fold_build2_loc (loc, MULT_EXPR, type,
10406 build_int_cst (type, 2) , arg1));
10408 strict_overflow_p = false;
10409 if (TREE_CODE (arg1) == INTEGER_CST
10410 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10411 &strict_overflow_p)))
10413 if (strict_overflow_p)
10414 fold_overflow_warning (("assuming signed overflow does not "
10415 "occur when simplifying "
10417 WARN_STRICT_OVERFLOW_MISC);
10418 return fold_convert_loc (loc, type, tem);
10421 /* Optimize z * conj(z) for integer complex numbers. */
10422 if (TREE_CODE (arg0) == CONJ_EXPR
10423 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10424 return fold_mult_zconjz (loc, type, arg1);
10425 if (TREE_CODE (arg1) == CONJ_EXPR
10426 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10427 return fold_mult_zconjz (loc, type, arg0);
10431 /* Maybe fold x * 0 to 0. The expressions aren't the same
10432 when x is NaN, since x * 0 is also NaN. Nor are they the
10433 same in modes with signed zeros, since multiplying a
10434 negative value by 0 gives -0, not +0. */
10435 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10436 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10437 && real_zerop (arg1))
10438 return omit_one_operand_loc (loc, type, arg1, arg0);
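/* Why the guards matter, for a hypothetical double x: if x is NaN,
   x * 0.0 is NaN rather than 0.0, and with signed zeros -3.0 * 0.0 is
   -0.0, so the fold to 0.0 is only valid when neither property has to
   be honored.  */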
10439 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10440 Likewise for complex arithmetic with signed zeros. */
10441 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10442 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10443 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10444 && real_onep (arg1))
10445 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10447 /* Transform x * -1.0 into -x. */
10448 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10449 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10450 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10451 && real_minus_onep (arg1))
10452 return fold_convert_loc (loc, type, negate_expr (arg0));
10454 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10455 the result for floating point types due to rounding so it is applied
10456 only if -fassociative-math was specified. */
10457 if (flag_associative_math
10458 && TREE_CODE (arg0) == RDIV_EXPR
10459 && TREE_CODE (arg1) == REAL_CST
10460 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10462 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10465 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10466 TREE_OPERAND (arg0, 1));
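/* Worked example, assuming a hypothetical double x and -fassociative-math:
   (2.0 / x) * 3.0 is rebuilt as 6.0 / x.  The constant product is formed
   at compile time, but the intermediate rounding may differ, hence the
   flag guard.  */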
10469 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10470 if (operand_equal_p (arg0, arg1, 0))
10472 tree tem = fold_strip_sign_ops (arg0);
10473 if (tem != NULL_TREE)
10475 tem = fold_convert_loc (loc, type, tem);
10476 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10480 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10481 This is not the same for NaNs or if signed zeros are involved. */
10483 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10484 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10485 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10486 && TREE_CODE (arg1) == COMPLEX_CST
10487 && real_zerop (TREE_REALPART (arg1)))
10489 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10490 if (real_onep (TREE_IMAGPART (arg1)))
10492 fold_build2_loc (loc, COMPLEX_EXPR, type,
10493 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10495 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10496 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10498 fold_build2_loc (loc, COMPLEX_EXPR, type,
10499 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10500 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10504 /* Optimize z * conj(z) for floating point complex numbers.
10505 Guarded by flag_unsafe_math_optimizations as non-finite
10506 imaginary components don't produce scalar results. */
10507 if (flag_unsafe_math_optimizations
10508 && TREE_CODE (arg0) == CONJ_EXPR
10509 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10510 return fold_mult_zconjz (loc, type, arg1);
10511 if (flag_unsafe_math_optimizations
10512 && TREE_CODE (arg1) == CONJ_EXPR
10513 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10514 return fold_mult_zconjz (loc, type, arg0);
10516 if (flag_unsafe_math_optimizations)
10518 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10519 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10521 /* Optimizations of root(...)*root(...). */
10522 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10525 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10526 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10528 /* Optimize sqrt(x)*sqrt(x) as x. */
10529 if (BUILTIN_SQRT_P (fcode0)
10530 && operand_equal_p (arg00, arg10, 0)
10531 && ! HONOR_SNANS (TYPE_MODE (type)))
10534 /* Optimize root(x)*root(y) as root(x*y). */
10535 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10536 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10537 return build_call_expr_loc (loc, rootfn, 1, arg);
10540 /* Optimize expN(x)*expN(y) as expN(x+y). */
10541 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10543 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10544 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10545 CALL_EXPR_ARG (arg0, 0),
10546 CALL_EXPR_ARG (arg1, 0));
10547 return build_call_expr_loc (loc, expfn, 1, arg);
10550 /* Optimizations of pow(...)*pow(...). */
10551 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10552 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10553 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10555 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10556 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10557 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10558 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10560 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10561 if (operand_equal_p (arg01, arg11, 0))
10563 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10564 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10566 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10569 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10570 if (operand_equal_p (arg00, arg10, 0))
10572 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10573 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10575 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
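/* Worked examples, assuming hypothetical doubles under
   -funsafe-math-optimizations: pow (x, y) * pow (z, y) becomes
   pow (x * z, y), and pow (x, y) * pow (x, z) becomes pow (x, y + z).  */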
10579 /* Optimize tan(x)*cos(x) as sin(x). */
10580 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10581 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10582 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10583 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10584 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10585 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10586 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10587 CALL_EXPR_ARG (arg1, 0), 0))
10589 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10591 if (sinfn != NULL_TREE)
10592 return build_call_expr_loc (loc, sinfn, 1,
10593 CALL_EXPR_ARG (arg0, 0));
10596 /* Optimize x*pow(x,c) as pow(x,c+1). */
10597 if (fcode1 == BUILT_IN_POW
10598 || fcode1 == BUILT_IN_POWF
10599 || fcode1 == BUILT_IN_POWL)
10601 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10602 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10603 if (TREE_CODE (arg11) == REAL_CST
10604 && !TREE_OVERFLOW (arg11)
10605 && operand_equal_p (arg0, arg10, 0))
10607 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10611 c = TREE_REAL_CST (arg11);
10612 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10613 arg = build_real (type, c);
10614 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10618 /* Optimize pow(x,c)*x as pow(x,c+1). */
10619 if (fcode0 == BUILT_IN_POW
10620 || fcode0 == BUILT_IN_POWF
10621 || fcode0 == BUILT_IN_POWL)
10623 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10624 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10625 if (TREE_CODE (arg01) == REAL_CST
10626 && !TREE_OVERFLOW (arg01)
10627 && operand_equal_p (arg1, arg00, 0))
10629 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10633 c = TREE_REAL_CST (arg01);
10634 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10635 arg = build_real (type, c);
10636 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10640 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10641 if (optimize_function_for_speed_p (cfun)
10642 && operand_equal_p (arg0, arg1, 0))
10644 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10648 tree arg = build_real (type, dconst2);
10649 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10658 if (integer_all_onesp (arg1))
10659 return omit_one_operand_loc (loc, type, arg1, arg0);
10660 if (integer_zerop (arg1))
10661 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10662 if (operand_equal_p (arg0, arg1, 0))
10663 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10665 /* ~X | X is -1. */
10666 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10667 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10669 t1 = fold_convert_loc (loc, type, integer_zero_node);
10670 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10671 return omit_one_operand_loc (loc, type, t1, arg1);
10674 /* X | ~X is -1. */
10675 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10676 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10678 t1 = fold_convert_loc (loc, type, integer_zero_node);
10679 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10680 return omit_one_operand_loc (loc, type, t1, arg0);
10683 /* Canonicalize (X & C1) | C2. */
10684 if (TREE_CODE (arg0) == BIT_AND_EXPR
10685 && TREE_CODE (arg1) == INTEGER_CST
10686 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10688 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10689 int width = TYPE_PRECISION (type), w;
10690 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10691 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10692 hi2 = TREE_INT_CST_HIGH (arg1);
10693 lo2 = TREE_INT_CST_LOW (arg1);
10695 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10696 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10697 return omit_one_operand_loc (loc, type, arg1,
10698 TREE_OPERAND (arg0, 0));
10700 if (width > HOST_BITS_PER_WIDE_INT)
10702 mhi = (unsigned HOST_WIDE_INT) -1
10703 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10709 mlo = (unsigned HOST_WIDE_INT) -1
10710 >> (HOST_BITS_PER_WIDE_INT - width);
10713 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10714 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10715 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10716 TREE_OPERAND (arg0, 0), arg1);
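/* Worked examples, assuming a hypothetical unsigned int x:
   (x & 0x0f) | 0xff folds to 0xff, since 0x0f is contained in 0xff;
   (x & ~0xffu) | 0xff folds to x | 0xff, since the two constants
   together cover every bit.  */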
10718 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10719 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10720 mode which allows further optimizations. */
10727 for (w = BITS_PER_UNIT;
10728 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10731 unsigned HOST_WIDE_INT mask
10732 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10733 if (((lo1 | lo2) & mask) == mask
10734 && (lo1 & ~mask) == 0 && hi1 == 0)
10741 if (hi3 != hi1 || lo3 != lo1)
10742 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10743 fold_build2_loc (loc, BIT_AND_EXPR, type,
10744 TREE_OPERAND (arg0, 0),
10745 build_int_cst_wide (type,
10750 /* (X & Y) | Y is (X, Y). */
10751 if (TREE_CODE (arg0) == BIT_AND_EXPR
10752 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10753 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10754 /* (X & Y) | X is (Y, X). */
10755 if (TREE_CODE (arg0) == BIT_AND_EXPR
10756 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10757 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10758 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10759 /* X | (X & Y) is (Y, X). */
10760 if (TREE_CODE (arg1) == BIT_AND_EXPR
10761 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10762 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10763 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10764 /* X | (Y & X) is (Y, X). */
10765 if (TREE_CODE (arg1) == BIT_AND_EXPR
10766 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10767 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10768 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10770 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10771 if (t1 != NULL_TREE)
10774 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10776 This results in more efficient code for machines without a NAND
10777 instruction. Combine will canonicalize to the first form
10778 which will allow use of NAND instructions provided by the
10779 backend if they exist. */
10780 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10781 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10784 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10785 build2 (BIT_AND_EXPR, type,
10786 fold_convert_loc (loc, type,
10787 TREE_OPERAND (arg0, 0)),
10788 fold_convert_loc (loc, type,
10789 TREE_OPERAND (arg1, 0))));
10792 /* See if this can be simplified into a rotate first. If that
10793 is unsuccessful continue in the association code. */
10797 if (integer_zerop (arg1))
10798 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10799 if (integer_all_onesp (arg1))
10800 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10801 if (operand_equal_p (arg0, arg1, 0))
10802 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10804 /* ~X ^ X is -1. */
10805 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10806 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10808 t1 = fold_convert_loc (loc, type, integer_zero_node);
10809 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10810 return omit_one_operand_loc (loc, type, t1, arg1);
10813 /* X ^ ~X is -1. */
10814 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10815 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10817 t1 = fold_convert_loc (loc, type, integer_zero_node);
10818 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10819 return omit_one_operand_loc (loc, type, t1, arg0);
10822 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10823 with a constant, and the two constants have no bits in common,
10824 we should treat this as a BIT_IOR_EXPR since this may produce more
10825 simplifications. */
10826 if (TREE_CODE (arg0) == BIT_AND_EXPR
10827 && TREE_CODE (arg1) == BIT_AND_EXPR
10828 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10829 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10830 && integer_zerop (const_binop (BIT_AND_EXPR,
10831 TREE_OPERAND (arg0, 1),
10832 TREE_OPERAND (arg1, 1))))
10834 code = BIT_IOR_EXPR;
10838 /* (X | Y) ^ X -> Y & ~X */
10839 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10840 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10842 tree t2 = TREE_OPERAND (arg0, 1);
10843 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10845 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10846 fold_convert_loc (loc, type, t2),
10847 fold_convert_loc (loc, type, t1));
10851 /* (Y | X) ^ X -> Y & ~X */
10852 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10853 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10855 tree t2 = TREE_OPERAND (arg0, 0);
10856 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10858 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10859 fold_convert_loc (loc, type, t2),
10860 fold_convert_loc (loc, type, t1));
10864 /* X ^ (X | Y) -> Y & ~X */
10865 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10866 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10868 tree t2 = TREE_OPERAND (arg1, 1);
10869 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10871 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10872 fold_convert_loc (loc, type, t2),
10873 fold_convert_loc (loc, type, t1));
10877 /* X ^ (Y | X) -> Y & ~X */
10878 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10879 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10881 tree t2 = TREE_OPERAND (arg1, 0);
10882 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10884 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10885 fold_convert_loc (loc, type, t2),
10886 fold_convert_loc (loc, type, t1));
10890 /* Convert ~X ^ ~Y to X ^ Y. */
10891 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10892 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10893 return fold_build2_loc (loc, code, type,
10894 fold_convert_loc (loc, type,
10895 TREE_OPERAND (arg0, 0)),
10896 fold_convert_loc (loc, type,
10897 TREE_OPERAND (arg1, 0)));
10899 /* Convert ~X ^ C to X ^ ~C. */
10900 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10901 && TREE_CODE (arg1) == INTEGER_CST)
10902 return fold_build2_loc (loc, code, type,
10903 fold_convert_loc (loc, type,
10904 TREE_OPERAND (arg0, 0)),
10905 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10907 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10908 if (TREE_CODE (arg0) == BIT_AND_EXPR
10909 && integer_onep (TREE_OPERAND (arg0, 1))
10910 && integer_onep (arg1))
10911 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10912 build_int_cst (TREE_TYPE (arg0), 0));
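/* Worked example, assuming a hypothetical int flags: (flags & 1) ^ 1
   flips a 0/1 value, so it is equivalent to testing (flags & 1) == 0.  */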
10914 /* Fold (X & Y) ^ Y as ~X & Y. */
10915 if (TREE_CODE (arg0) == BIT_AND_EXPR
10916 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10918 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10919 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10920 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10921 fold_convert_loc (loc, type, arg1));
10923 /* Fold (X & Y) ^ X as ~Y & X. */
10924 if (TREE_CODE (arg0) == BIT_AND_EXPR
10925 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10926 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10928 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10929 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10930 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10931 fold_convert_loc (loc, type, arg1));
10933 /* Fold X ^ (X & Y) as X & ~Y. */
10934 if (TREE_CODE (arg1) == BIT_AND_EXPR
10935 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10937 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10938 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10939 fold_convert_loc (loc, type, arg0),
10940 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10942 /* Fold X ^ (Y & X) as ~Y & X. */
10943 if (TREE_CODE (arg1) == BIT_AND_EXPR
10944 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10945 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10947 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10948 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10949 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10950 fold_convert_loc (loc, type, arg0));
10953 /* See if this can be simplified into a rotate first. If that
10954 is unsuccessful continue in the association code. */
10958 if (integer_all_onesp (arg1))
10959 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10960 if (integer_zerop (arg1))
10961 return omit_one_operand_loc (loc, type, arg1, arg0);
10962 if (operand_equal_p (arg0, arg1, 0))
10963 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10965 /* ~X & X is always zero. */
10966 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10967 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10968 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10970 /* X & ~X is always zero. */
10971 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10972 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10973 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10975 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10976 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10977 && TREE_CODE (arg1) == INTEGER_CST
10978 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10980 tree tmp1 = fold_convert_loc (loc, type, arg1);
10981 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10982 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10983 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10984 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10986 fold_convert_loc (loc, type,
10987 fold_build2_loc (loc, BIT_IOR_EXPR,
10988 type, tmp2, tmp3));
10991 /* (X | Y) & Y is (X, Y). */
10992 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10993 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10994 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10995 /* (X | Y) & X is (Y, X). */
10996 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10997 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10998 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10999 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11000 /* X & (X | Y) is (Y, X). */
11001 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11002 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11003 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11004 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11005 /* X & (Y | X) is (Y, X). */
11006 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11007 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11008 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11009 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11011 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11012 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11013 && integer_onep (TREE_OPERAND (arg0, 1))
11014 && integer_onep (arg1))
11016 tem = TREE_OPERAND (arg0, 0);
11017 return fold_build2_loc (loc, EQ_EXPR, type,
11018 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11019 build_int_cst (TREE_TYPE (tem), 1)),
11020 build_int_cst (TREE_TYPE (tem), 0));
11022 /* Fold ~X & 1 as (X & 1) == 0. */
11023 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11024 && integer_onep (arg1))
11026 tem = TREE_OPERAND (arg0, 0);
11027 return fold_build2_loc (loc, EQ_EXPR, type,
11028 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11029 build_int_cst (TREE_TYPE (tem), 1)),
11030 build_int_cst (TREE_TYPE (tem), 0));
11033 /* Fold (X ^ Y) & Y as ~X & Y. */
11034 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11035 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11037 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11038 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11039 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11040 fold_convert_loc (loc, type, arg1));
11042 /* Fold (X ^ Y) & X as ~Y & X. */
11043 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11044 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11045 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11047 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11048 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11049 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11050 fold_convert_loc (loc, type, arg1));
11052 /* Fold X & (X ^ Y) as X & ~Y. */
11053 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11054 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11056 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11057 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11058 fold_convert_loc (loc, type, arg0),
11059 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11061 /* Fold X & (Y ^ X) as ~Y & X. */
11062 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11063 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11064 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11066 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11067 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11068 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11069 fold_convert_loc (loc, type, arg0));
11072 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11073 if (t1 != NULL_TREE)
11075 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11076 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11077 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11080 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11082 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11083 && (~TREE_INT_CST_LOW (arg1)
11084 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11086 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11089 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11091 This results in more efficient code for machines without a NOR
11092 instruction. Combine will canonicalize to the first form
11093 which will allow use of NOR instructions provided by the
11094 backend if they exist. */
11095 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11096 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11098 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11099 build2 (BIT_IOR_EXPR, type,
11100 fold_convert_loc (loc, type,
11101 TREE_OPERAND (arg0, 0)),
11102 fold_convert_loc (loc, type,
11103 TREE_OPERAND (arg1, 0))));
11106 /* If arg0 is derived from the address of an object or function, we may
11107 be able to fold this expression using the object or function's alignment. */
11109 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11111 unsigned HOST_WIDE_INT modulus, residue;
11112 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11114 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11115 integer_onep (arg1));
11117 /* This works because modulus is a power of 2. If this weren't the
11118 case, we'd have to replace it by its greatest power-of-2
11119 divisor: modulus & -modulus. */
11121 return build_int_cst (type, residue & low);
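/* Sketch, assuming a hypothetical pointer p known to be 8-byte aligned
   (modulus 8, residue 0): an expression of the form p & 7 seen here
   folds to the constant 0, because the low three bits of p are known.  */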
11124 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11125 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11126 if the new mask might be further optimized. */
11127 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11128 || TREE_CODE (arg0) == RSHIFT_EXPR)
11129 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11130 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11131 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11132 < TYPE_PRECISION (TREE_TYPE (arg0))
11133 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11134 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11136 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11137 unsigned HOST_WIDE_INT mask
11138 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11139 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11140 tree shift_type = TREE_TYPE (arg0);
11142 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11143 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11144 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11145 && TYPE_PRECISION (TREE_TYPE (arg0))
11146 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11148 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11149 tree arg00 = TREE_OPERAND (arg0, 0);
11150 /* See if more bits can be proven as zero because of zero extension. */
11152 if (TREE_CODE (arg00) == NOP_EXPR
11153 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11155 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11156 if (TYPE_PRECISION (inner_type)
11157 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11158 && TYPE_PRECISION (inner_type) < prec)
11160 prec = TYPE_PRECISION (inner_type);
11161 /* See if we can shorten the right shift. */
11163 shift_type = inner_type;
11166 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11167 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11168 zerobits <<= prec - shiftc;
11169 /* For an arithmetic shift, if the sign bit could be set, zerobits
11170 may actually contain sign bits, so no transformation is
11171 possible, unless MASK masks them all away. In that
11172 case the shift needs to be converted into logical shift. */
11173 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11174 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11176 if ((mask & zerobits) == 0)
11177 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11183 /* ((X << 16) & 0xff00) is (X, 0). */
11184 if ((mask & zerobits) == mask)
11185 return omit_one_operand_loc (loc, type,
11186 build_int_cst (type, 0), arg0);
11188 newmask = mask | zerobits;
11189 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11193 /* Only do the transformation if NEWMASK is some integer mode's mask. */
11195 for (prec = BITS_PER_UNIT;
11196 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11197 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11199 if (prec < HOST_BITS_PER_WIDE_INT
11200 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11204 if (shift_type != TREE_TYPE (arg0))
11206 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11207 fold_convert_loc (loc, shift_type,
11208 TREE_OPERAND (arg0, 0)),
11209 TREE_OPERAND (arg0, 1));
11210 tem = fold_convert_loc (loc, type, tem);
11214 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11215 if (!tree_int_cst_equal (newmaskt, arg1))
11216 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11224 /* Don't touch a floating-point divide by zero unless the mode
11225 of the constant can represent infinity. */
11226 if (TREE_CODE (arg1) == REAL_CST
11227 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11228 && real_zerop (arg1))
11231 /* Optimize A / A to 1.0 if we don't care about
11232 NaNs or Infinities. Skip the transformation
11233 for non-real operands. */
11234 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11235 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11236 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11237 && operand_equal_p (arg0, arg1, 0))
11239 tree r = build_real (TREE_TYPE (arg0), dconst1);
11241 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11244 /* The complex version of the above A / A optimization. */
11245 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11246 && operand_equal_p (arg0, arg1, 0))
11248 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11249 if (! HONOR_NANS (TYPE_MODE (elem_type))
11250 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11252 tree r = build_real (elem_type, dconst1);
11253 /* omit_two_operands will call fold_convert for us. */
11254 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11258 /* (-A) / (-B) -> A / B */
11259 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11260 return fold_build2_loc (loc, RDIV_EXPR, type,
11261 TREE_OPERAND (arg0, 0),
11262 negate_expr (arg1));
11263 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11264 return fold_build2_loc (loc, RDIV_EXPR, type,
11265 negate_expr (arg0),
11266 TREE_OPERAND (arg1, 0));
11268 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11269 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11270 && real_onep (arg1))
11271 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11273 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11274 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11275 && real_minus_onep (arg1))
11276 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11277 negate_expr (arg0)));
11279 /* If ARG1 is a constant, we can convert this to a multiply by the
11280 reciprocal. This does not have the same rounding properties,
11281 so only do this if -freciprocal-math. We can actually
11282 always safely do it if ARG1 is a power of two, but it's hard to
11283 tell if it is or not in a portable manner. */
11284 if (TREE_CODE (arg1) == REAL_CST)
11286 if (flag_reciprocal_math
11287 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11289 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11290 /* Find the reciprocal if optimizing and the result is exact. */
11294 r = TREE_REAL_CST (arg1);
11295 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11297 tem = build_real (type, r);
11298 return fold_build2_loc (loc, MULT_EXPR, type,
11299 fold_convert_loc (loc, type, arg0), tem);
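/* Worked example, assuming a hypothetical double x: when optimizing,
   x / 4.0 folds to x * 0.25 because 0.25 is exactly representable, so
   the result is bit-identical; x / 3.0 is left alone unless
   -freciprocal-math is in effect.  */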
11303 /* Convert A/B/C to A/(B*C). */
11304 if (flag_reciprocal_math
11305 && TREE_CODE (arg0) == RDIV_EXPR)
11306 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11307 fold_build2_loc (loc, MULT_EXPR, type,
11308 TREE_OPERAND (arg0, 1), arg1));
11310 /* Convert A/(B/C) to (A/B)*C. */
11311 if (flag_reciprocal_math
11312 && TREE_CODE (arg1) == RDIV_EXPR)
11313 return fold_build2_loc (loc, MULT_EXPR, type,
11314 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11315 TREE_OPERAND (arg1, 0)),
11316 TREE_OPERAND (arg1, 1));
11318 /* Convert C1/(X*C2) into (C1/C2)/X. */
11319 if (flag_reciprocal_math
11320 && TREE_CODE (arg1) == MULT_EXPR
11321 && TREE_CODE (arg0) == REAL_CST
11322 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11324 tree tem = const_binop (RDIV_EXPR, arg0,
11325 TREE_OPERAND (arg1, 1));
11327 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11328 TREE_OPERAND (arg1, 0));
11331 if (flag_unsafe_math_optimizations)
11333 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11334 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11336 /* Optimize sin(x)/cos(x) as tan(x). */
11337 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11338 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11339 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11340 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11341 CALL_EXPR_ARG (arg1, 0), 0))
11343 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11345 if (tanfn != NULL_TREE)
11346 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11349 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11350 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11351 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11352 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11353 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11354 CALL_EXPR_ARG (arg1, 0), 0))
11356 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11358 if (tanfn != NULL_TREE)
11360 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11361 CALL_EXPR_ARG (arg0, 0));
11362 return fold_build2_loc (loc, RDIV_EXPR, type,
11363 build_real (type, dconst1), tmp);
11367 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11368 NaNs or Infinities. */
11369 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11370 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11371 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11373 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11374 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11376 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11377 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11378 && operand_equal_p (arg00, arg01, 0))
11380 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11382 if (cosfn != NULL_TREE)
11383 return build_call_expr_loc (loc, cosfn, 1, arg00);
11387 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11388 NaNs or Infinities. */
11389 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11390 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11391 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11393 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11394 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11396 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11397 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11398 && operand_equal_p (arg00, arg01, 0))
11400 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11402 if (cosfn != NULL_TREE)
11404 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11405 return fold_build2_loc (loc, RDIV_EXPR, type,
11406 build_real (type, dconst1),
11412 /* Optimize pow(x,c)/x as pow(x,c-1). */
11413 if (fcode0 == BUILT_IN_POW
11414 || fcode0 == BUILT_IN_POWF
11415 || fcode0 == BUILT_IN_POWL)
11417 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11418 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11419 if (TREE_CODE (arg01) == REAL_CST
11420 && !TREE_OVERFLOW (arg01)
11421 && operand_equal_p (arg1, arg00, 0))
11423 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11427 c = TREE_REAL_CST (arg01);
11428 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11429 arg = build_real (type, c);
11430 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11434 /* Optimize a/root(b/c) into a*root(c/b). */
11435 if (BUILTIN_ROOT_P (fcode1))
11437 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11439 if (TREE_CODE (rootarg) == RDIV_EXPR)
11441 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11442 tree b = TREE_OPERAND (rootarg, 0);
11443 tree c = TREE_OPERAND (rootarg, 1);
11445 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11447 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11448 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11452 /* Optimize x/expN(y) into x*expN(-y). */
11453 if (BUILTIN_EXPONENT_P (fcode1))
11455 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11456 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11457 arg1 = build_call_expr_loc (loc,
11459 fold_convert_loc (loc, type, arg));
11460 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11463 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11464 if (fcode1 == BUILT_IN_POW
11465 || fcode1 == BUILT_IN_POWF
11466 || fcode1 == BUILT_IN_POWL)
11468 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11469 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11470 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11471 tree neg11 = fold_convert_loc (loc, type,
11472 negate_expr (arg11));
11473 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11474 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11479 case TRUNC_DIV_EXPR:
11480 case FLOOR_DIV_EXPR:
11481 /* Simplify A / (B << N) where A and B are positive and B is
11482 a power of 2, to A >> (N + log2(B)). */
11483 strict_overflow_p = false;
11484 if (TREE_CODE (arg1) == LSHIFT_EXPR
11485 && (TYPE_UNSIGNED (type)
11486 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11488 tree sval = TREE_OPERAND (arg1, 0);
11489 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11491 tree sh_cnt = TREE_OPERAND (arg1, 1);
11492 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11494 if (strict_overflow_p)
11495 fold_overflow_warning (("assuming signed overflow does not "
11496 "occur when simplifying A / (B << N)"),
11497 WARN_STRICT_OVERFLOW_MISC);
11499 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11500 sh_cnt, build_int_cst (NULL_TREE, pow2));
11501 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11502 fold_convert_loc (loc, type, arg0), sh_cnt);
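/* Worked example, assuming hypothetical unsigned x and n:
   x / (4u << n) divides by a power of two, so it folds to
   x >> (n + 2), where 2 is log2 (4).  */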
11506 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11507 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11508 if (INTEGRAL_TYPE_P (type)
11509 && TYPE_UNSIGNED (type)
11510 && code == FLOOR_DIV_EXPR)
11511 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11515 case ROUND_DIV_EXPR:
11516 case CEIL_DIV_EXPR:
11517 case EXACT_DIV_EXPR:
11518 if (integer_onep (arg1))
11519 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11520 if (integer_zerop (arg1))
11522 /* X / -1 is -X. */
11523 if (!TYPE_UNSIGNED (type)
11524 && TREE_CODE (arg1) == INTEGER_CST
11525 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11526 && TREE_INT_CST_HIGH (arg1) == -1)
11527 return fold_convert_loc (loc, type, negate_expr (arg0));
11529 /* Convert -A / -B to A / B when the type is signed and overflow is undefined. */
11531 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11532 && TREE_CODE (arg0) == NEGATE_EXPR
11533 && negate_expr_p (arg1))
11535 if (INTEGRAL_TYPE_P (type))
11536 fold_overflow_warning (("assuming signed overflow does not occur "
11537 "when distributing negation across "
11539 WARN_STRICT_OVERFLOW_MISC);
11540 return fold_build2_loc (loc, code, type,
11541 fold_convert_loc (loc, type,
11542 TREE_OPERAND (arg0, 0)),
11543 fold_convert_loc (loc, type,
11544 negate_expr (arg1)));
11546 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11547 && TREE_CODE (arg1) == NEGATE_EXPR
11548 && negate_expr_p (arg0))
11550 if (INTEGRAL_TYPE_P (type))
11551 fold_overflow_warning (("assuming signed overflow does not occur "
11552 "when distributing negation across "
11554 WARN_STRICT_OVERFLOW_MISC);
11555 return fold_build2_loc (loc, code, type,
11556 fold_convert_loc (loc, type,
11557 negate_expr (arg0)),
11558 fold_convert_loc (loc, type,
11559 TREE_OPERAND (arg1, 0)));
11562 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11563 operation, EXACT_DIV_EXPR.
11565 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11566 At one time others generated faster code, it's not clear if they do
11567 after the last round of changes to the DIV code in expmed.c. */
11568 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11569 && multiple_of_p (type, arg0, arg1))
11570 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11572 strict_overflow_p = false;
11573 if (TREE_CODE (arg1) == INTEGER_CST
11574 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11575 &strict_overflow_p)))
11577 if (strict_overflow_p)
11578 fold_overflow_warning (("assuming signed overflow does not occur "
11579 "when simplifying division"),
11580 WARN_STRICT_OVERFLOW_MISC);
11581 return fold_convert_loc (loc, type, tem);
11586 case CEIL_MOD_EXPR:
11587 case FLOOR_MOD_EXPR:
11588 case ROUND_MOD_EXPR:
11589 case TRUNC_MOD_EXPR:
11590 /* X % 1 is always zero, but be sure to preserve any side effects in X. */
11592 if (integer_onep (arg1))
11593 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11595 /* X % 0, return X % 0 unchanged so that we can get the
11596 proper warnings and errors. */
11597 if (integer_zerop (arg1))
11600 /* 0 % X is always zero, but be sure to preserve any side
11601 effects in X. Place this after checking for X == 0. */
11602 if (integer_zerop (arg0))
11603 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11605 /* X % -1 is zero. */
11606 if (!TYPE_UNSIGNED (type)
11607 && TREE_CODE (arg1) == INTEGER_CST
11608 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11609 && TREE_INT_CST_HIGH (arg1) == -1)
11610 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11612 /* X % -C is the same as X % C. */
11613 if (code == TRUNC_MOD_EXPR
11614 && !TYPE_UNSIGNED (type)
11615 && TREE_CODE (arg1) == INTEGER_CST
11616 && !TREE_OVERFLOW (arg1)
11617 && TREE_INT_CST_HIGH (arg1) < 0
11618 && !TYPE_OVERFLOW_TRAPS (type)
11619 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11620 && !sign_bit_p (arg1, arg1))
11621 return fold_build2_loc (loc, code, type,
11622 fold_convert_loc (loc, type, arg0),
11623 fold_convert_loc (loc, type,
11624 negate_expr (arg1)));
11626 /* X % -Y is the same as X % Y. */
11627 if (code == TRUNC_MOD_EXPR
11628 && !TYPE_UNSIGNED (type)
11629 && TREE_CODE (arg1) == NEGATE_EXPR
11630 && !TYPE_OVERFLOW_TRAPS (type))
11631 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11632 fold_convert_loc (loc, type,
11633 TREE_OPERAND (arg1, 0)));
11635 strict_overflow_p = false;
11636 if (TREE_CODE (arg1) == INTEGER_CST
11637 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11638 &strict_overflow_p)))
11640 if (strict_overflow_p)
11641 fold_overflow_warning (("assuming signed overflow does not occur "
11642 "when simplifying modulus"),
11643 WARN_STRICT_OVERFLOW_MISC);
11644 return fold_convert_loc (loc, type, tem);
11647 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11648 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11649 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11650 && (TYPE_UNSIGNED (type)
11651 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11654 /* Also optimize A % (C << N) where C is a power of 2,
11655 to A & ((C << N) - 1). */
11656 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11657 c = TREE_OPERAND (arg1, 0);
11659 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11662 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11663 build_int_cst (TREE_TYPE (arg1), 1));
11664 if (strict_overflow_p)
11665 fold_overflow_warning (("assuming signed overflow does not "
11666 "occur when simplifying "
11667 "X % (power of two)"),
11668 WARN_STRICT_OVERFLOW_MISC);
11669 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11670 fold_convert_loc (loc, type, arg0),
11671 fold_convert_loc (loc, type, mask));
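/* Worked examples, assuming a hypothetical unsigned x: x % 8u folds to
   x & 7u, and x % (2u << n) folds to x & ((2u << n) - 1), both valid
   because the operands are known to be non-negative.  */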
11679 if (integer_all_onesp (arg0))
11680 return omit_one_operand_loc (loc, type, arg0, arg1);
11684 /* Optimize -1 >> x for arithmetic right shifts. */
11685 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11686 && tree_expr_nonnegative_p (arg1))
11687 return omit_one_operand_loc (loc, type, arg0, arg1);
11688 /* ... fall through ... */
11692 if (integer_zerop (arg1))
11693 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11694 if (integer_zerop (arg0))
11695 return omit_one_operand_loc (loc, type, arg0, arg1);
11697 /* Since a negative shift count is not well-defined,
11698 don't try to compute it in the compiler. */
11699 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11702 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11703 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11704 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11705 && host_integerp (TREE_OPERAND (arg0, 1), false)
11706 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11708 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11709 + TREE_INT_CST_LOW (arg1));
11711 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11712 being well defined. */
11713 if (low >= TYPE_PRECISION (type))
11715 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11716 low = low % TYPE_PRECISION (type);
11717 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11718 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11719 TREE_OPERAND (arg0, 0));
11721 low = TYPE_PRECISION (type) - 1;
11724 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11725 build_int_cst (type, low));
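/* Worked examples, assuming a hypothetical 32-bit unsigned x:
   (x << 3) << 5 folds to x << 8.  When the combined count reaches the
   precision, e.g. (x << 20) << 12, the result folds to 0, whereas for
   rotates the count simply wraps modulo the precision.  */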
11728 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11729 into x & ((unsigned)-1 >> c) for unsigned types. */
11730 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11731 || (TYPE_UNSIGNED (type)
11732 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11733 && host_integerp (arg1, false)
11734 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11735 && host_integerp (TREE_OPERAND (arg0, 1), false)
11736 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11738 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11739 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11745 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11747 lshift = build_int_cst (type, -1);
11748 lshift = int_const_binop (code, lshift, arg1, 0);
11750 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
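/* Worked example, assuming a hypothetical 32-bit unsigned x:
   (x >> 4) << 4 folds to x & 0xfffffff0, i.e. x with its low four bits
   cleared, and (x << 4) >> 4 folds to x & 0x0fffffff.  */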
11754 /* Rewrite an LROTATE_EXPR by a constant into an
11755 RROTATE_EXPR by a new constant. */
11756 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11758 tree tem = build_int_cst (TREE_TYPE (arg1),
11759 TYPE_PRECISION (type));
11760 tem = const_binop (MINUS_EXPR, tem, arg1);
11761 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
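/* For example, on a 32-bit type, a left rotate by 8 becomes a right
   rotate by 24.  */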
11764 /* If we have a rotate of a bit operation with the rotate count and
11765 the second operand of the bit operation both constant,
11766 permute the two operations. */
11767 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11768 && (TREE_CODE (arg0) == BIT_AND_EXPR
11769 || TREE_CODE (arg0) == BIT_IOR_EXPR
11770 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11771 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11772 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11773 fold_build2_loc (loc, code, type,
11774 TREE_OPERAND (arg0, 0), arg1),
11775 fold_build2_loc (loc, code, type,
11776 TREE_OPERAND (arg0, 1), arg1));
11778 /* Two consecutive rotates adding up to the precision of the
11779 type can be ignored. */
11780 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11781 && TREE_CODE (arg0) == RROTATE_EXPR
11782 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11783 && TREE_INT_CST_HIGH (arg1) == 0
11784 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11785 && ((TREE_INT_CST_LOW (arg1)
11786 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11787 == (unsigned int) TYPE_PRECISION (type)))
11788 return TREE_OPERAND (arg0, 0);
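/* For example, on a 32-bit type, rotating right by 8 and then by 24
   gives back the original operand.  */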
11790 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11791 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11792 if the latter can be further optimized. */
11793 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11794 && TREE_CODE (arg0) == BIT_AND_EXPR
11795 && TREE_CODE (arg1) == INTEGER_CST
11796 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11798 tree mask = fold_build2_loc (loc, code, type,
11799 fold_convert_loc (loc, type,
11800 TREE_OPERAND (arg0, 1)),
11802 tree shift = fold_build2_loc (loc, code, type,
11803 fold_convert_loc (loc, type,
11804 TREE_OPERAND (arg0, 0)),
11806 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11814 if (operand_equal_p (arg0, arg1, 0))
11815 return omit_one_operand_loc (loc, type, arg0, arg1);
11816 if (INTEGRAL_TYPE_P (type)
11817 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11818 return omit_one_operand_loc (loc, type, arg1, arg0);
11819 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11825 if (operand_equal_p (arg0, arg1, 0))
11826 return omit_one_operand_loc (loc, type, arg0, arg1);
11827 if (INTEGRAL_TYPE_P (type)
11828 && TYPE_MAX_VALUE (type)
11829 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11830 return omit_one_operand_loc (loc, type, arg1, arg0);
11831 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11836 case TRUTH_ANDIF_EXPR:
11837 /* Note that the operands of this must be ints
11838 and their values must be 0 or 1.
11839 ("true" is a fixed value perhaps depending on the language.) */
11840 /* If first arg is constant zero, return it. */
11841 if (integer_zerop (arg0))
11842 return fold_convert_loc (loc, type, arg0);
11843 case TRUTH_AND_EXPR:
11844 /* If either arg is constant true, drop it. */
11845 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11846 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11847 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11848 /* Preserve sequence points. */
11849 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11850 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11851 /* If second arg is constant zero, result is zero, but first arg
11852 must be evaluated. */
11853 if (integer_zerop (arg1))
11854 return omit_one_operand_loc (loc, type, arg1, arg0);
11855 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11856 case will be handled here. */
11857 if (integer_zerop (arg0))
11858 return omit_one_operand_loc (loc, type, arg0, arg1);
11860 /* !X && X is always false. */
11861 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11862 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11863 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11864 /* X && !X is always false. */
11865 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11866 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11867 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11869 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11870 means A >= Y && A != MAX, but in this case we know that A < X <= MAX.  */
11873 if (!TREE_SIDE_EFFECTS (arg0)
11874 && !TREE_SIDE_EFFECTS (arg1))
11876 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11877 if (tem && !operand_equal_p (tem, arg0, 0))
11878 return fold_build2_loc (loc, code, type, tem, arg1);
11880 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11881 if (tem && !operand_equal_p (tem, arg1, 0))
11882 return fold_build2_loc (loc, code, type, arg0, tem);
11886 /* We only do these simplifications if we are optimizing. */
11890 /* Check for things like (A || B) && (A || C). We can convert this
11891 to A || (B && C). Note that either operator can be any of the four
11892 truth and/or operations and the transformation will still be
11893 valid. Also note that we only care about order for the
11894 ANDIF and ORIF operators. If B contains side effects, this
11895 might change the truth-value of A. */
11896 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11897 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11898 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11899 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11900 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11901 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11903 tree a00 = TREE_OPERAND (arg0, 0);
11904 tree a01 = TREE_OPERAND (arg0, 1);
11905 tree a10 = TREE_OPERAND (arg1, 0);
11906 tree a11 = TREE_OPERAND (arg1, 1);
11907 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11908 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11909 && (code == TRUTH_AND_EXPR
11910 || code == TRUTH_OR_EXPR));
11912 if (operand_equal_p (a00, a10, 0))
11913 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11914 fold_build2_loc (loc, code, type, a01, a11));
11915 else if (commutative && operand_equal_p (a00, a11, 0))
11916 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11917 fold_build2_loc (loc, code, type, a01, a10));
11918 else if (commutative && operand_equal_p (a01, a10, 0))
11919 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
11920 fold_build2_loc (loc, code, type, a00, a11));
11922 /* This case is tricky because we must either have commutative
11923 operators or else A10 must not have side-effects. */
11925 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11926 && operand_equal_p (a01, a11, 0))
11927 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11928 fold_build2_loc (loc, code, type, a00, a10),
11932 /* See if we can build a range comparison. */
11933 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
11936 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
11937 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
11939 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
11941 return fold_build2_loc (loc, code, type, tem, arg1);
11944 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
11945 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
11947 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
11949 return fold_build2_loc (loc, code, type, arg0, tem);
11952 /* Check for the possibility of merging component references. If our
11953 lhs is another similar operation, try to merge its rhs with our
11954 rhs. Then try to merge our lhs and rhs. */
11955 if (TREE_CODE (arg0) == code
11956 && 0 != (tem = fold_truthop (loc, code, type,
11957 TREE_OPERAND (arg0, 1), arg1)))
11958 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
11960 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
11965 case TRUTH_ORIF_EXPR:
11966 /* Note that the operands of this must be ints
11967 and their values must be 0 or 1.
11968 ("true" is a fixed value perhaps depending on the language.) */
11969 /* If first arg is constant true, return it. */
11970 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11971 return fold_convert_loc (loc, type, arg0);
11972 case TRUTH_OR_EXPR:
11973 /* If either arg is constant zero, drop it. */
11974 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11975 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11976 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11977 /* Preserve sequence points. */
11978 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11979 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11980 /* If second arg is constant true, result is true, but we must
11981 evaluate first arg. */
11982 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11983 return omit_one_operand_loc (loc, type, arg1, arg0);
11984 /* Likewise for first arg, but note this only occurs here for TRUTH_OR_EXPR.  */
11986 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11987 return omit_one_operand_loc (loc, type, arg0, arg1);
11989 /* !X || X is always true. */
11990 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11991 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11992 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11993 /* X || !X is always true. */
11994 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11995 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11996 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12000 case TRUTH_XOR_EXPR:
12001 /* If the second arg is constant zero, drop it. */
12002 if (integer_zerop (arg1))
12003 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12004 /* If the second arg is constant true, this is a logical inversion. */
12005 if (integer_onep (arg1))
12007 /* Only call invert_truthvalue if operand is a truth value. */
12008 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12009 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12011 tem = invert_truthvalue_loc (loc, arg0);
12012 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12014 /* Identical arguments cancel to zero. */
12015 if (operand_equal_p (arg0, arg1, 0))
12016 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12018 /* !X ^ X is always true. */
12019 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12020 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12021 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12023 /* X ^ !X is always true. */
12024 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12025 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12026 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12032 tem = fold_comparison (loc, code, type, op0, op1);
12033 if (tem != NULL_TREE)
12036 /* bool_var != 0 becomes bool_var. */
12037 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12038 && code == NE_EXPR)
12039 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12041 /* bool_var == 1 becomes bool_var. */
12042 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12043 && code == EQ_EXPR)
12044 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12046 /* bool_var != 1 becomes !bool_var. */
12047 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12048 && code == NE_EXPR)
12049 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12050 fold_convert_loc (loc, type, arg0));
12052 /* bool_var == 0 becomes !bool_var. */
12053 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12054 && code == EQ_EXPR)
12055 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12056 fold_convert_loc (loc, type, arg0));
12058 /* !exp != 0 becomes !exp */
12059 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12060 && code == NE_EXPR)
12061 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12063 /* If this is an equality comparison of the address of two non-weak,
12064 unaliased symbols neither of which are extern (since we do not
12065 have access to attributes for externs), then we know the result. */
12066 if (TREE_CODE (arg0) == ADDR_EXPR
12067 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12068 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12069 && ! lookup_attribute ("alias",
12070 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12071 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12072 && TREE_CODE (arg1) == ADDR_EXPR
12073 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12074 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12075 && ! lookup_attribute ("alias",
12076 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12077 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12079 /* We know that we're looking at the address of two
12080 non-weak, unaliased, static _DECL nodes.
12082 It is both wasteful and incorrect to call operand_equal_p
12083 to compare the two ADDR_EXPR nodes. It is wasteful in that
12084 all we need to do is test pointer equality for the arguments
12085 to the two ADDR_EXPR nodes. It is incorrect to use
12086 operand_equal_p as that function is NOT equivalent to a
12087 C equality test. It can in fact return false for two
12088 objects which would test as equal using the C equality operator.  */
12090 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12091 return constant_boolean_node (equal
12092 ? code == EQ_EXPR : code != EQ_EXPR,
12096 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12097 a MINUS_EXPR of a constant, we can convert it into a comparison with
12098 a revised constant as long as no overflow occurs. */
12099 if (TREE_CODE (arg1) == INTEGER_CST
12100 && (TREE_CODE (arg0) == PLUS_EXPR
12101 || TREE_CODE (arg0) == MINUS_EXPR)
12102 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12103 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12104 ? MINUS_EXPR : PLUS_EXPR,
12105 fold_convert_loc (loc, TREE_TYPE (arg0),
12107 TREE_OPERAND (arg0, 1)))
12108 && !TREE_OVERFLOW (tem))
12109 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12111 /* Similarly for a NEGATE_EXPR. */
12112 if (TREE_CODE (arg0) == NEGATE_EXPR
12113 && TREE_CODE (arg1) == INTEGER_CST
12114 && 0 != (tem = negate_expr (arg1))
12115 && TREE_CODE (tem) == INTEGER_CST
12116 && !TREE_OVERFLOW (tem))
12117 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12119 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12120 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12121 && TREE_CODE (arg1) == INTEGER_CST
12122 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12123 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12124 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12125 fold_convert_loc (loc,
12128 TREE_OPERAND (arg0, 1)));
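/* For example, (x ^ 5) == 3 folds to x == 6.  */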
12130 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12131 if ((TREE_CODE (arg0) == PLUS_EXPR
12132 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12133 || TREE_CODE (arg0) == MINUS_EXPR)
12134 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12135 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12136 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12138 tree val = TREE_OPERAND (arg0, 1);
12139 return omit_two_operands_loc (loc, type,
12140 fold_build2_loc (loc, code, type,
12142 build_int_cst (TREE_TYPE (val),
12144 TREE_OPERAND (arg0, 0), arg1);
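/* For example, x + y == x folds to y == 0; x is retained only for its
   side effects via omit_two_operands.  */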
12147 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12148 if (TREE_CODE (arg0) == MINUS_EXPR
12149 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12150 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12151 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12153 return omit_two_operands_loc (loc, type,
12155 ? boolean_true_node : boolean_false_node,
12156 TREE_OPERAND (arg0, 1), arg1);
12159 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12160 for !=. Don't do this for ordered comparisons due to overflow. */
12161 if (TREE_CODE (arg0) == MINUS_EXPR
12162 && integer_zerop (arg1))
12163 return fold_build2_loc (loc, code, type,
12164 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12166 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12167 if (TREE_CODE (arg0) == ABS_EXPR
12168 && (integer_zerop (arg1) || real_zerop (arg1)))
12169 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12171 /* If this is an EQ or NE comparison with zero and ARG0 is
12172 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12173 two operations, but the latter can be done in one less insn
12174 on machines that have only two-operand insns or on which a
12175 constant cannot be the first operand. */
12176 if (TREE_CODE (arg0) == BIT_AND_EXPR
12177 && integer_zerop (arg1))
12179 tree arg00 = TREE_OPERAND (arg0, 0);
12180 tree arg01 = TREE_OPERAND (arg0, 1);
12181 if (TREE_CODE (arg00) == LSHIFT_EXPR
12182 && integer_onep (TREE_OPERAND (arg00, 0)))
12184 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12185 arg01, TREE_OPERAND (arg00, 1));
12186 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12187 build_int_cst (TREE_TYPE (arg0), 1));
12188 return fold_build2_loc (loc, code, type,
12189 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12192 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12193 && integer_onep (TREE_OPERAND (arg01, 0)))
12195 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12196 arg00, TREE_OPERAND (arg01, 1));
12197 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12198 build_int_cst (TREE_TYPE (arg0), 1));
12199 return fold_build2_loc (loc, code, type,
12200 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12205 /* If this is an NE or EQ comparison of zero against the result of a
12206 signed MOD operation whose second operand is a power of 2, make
12207 the MOD operation unsigned since it is simpler and equivalent. */
12208 if (integer_zerop (arg1)
12209 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12210 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12211 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12212 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12213 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12214 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12216 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12217 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12218 fold_convert_loc (loc, newtype,
12219 TREE_OPERAND (arg0, 0)),
12220 fold_convert_loc (loc, newtype,
12221 TREE_OPERAND (arg0, 1)));
12223 return fold_build2_loc (loc, code, type, newmod,
12224 fold_convert_loc (loc, newtype, arg1));
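/* For example, with signed x, x % 4 == 0 folds to the equivalent but
   simpler (unsigned) x % 4 == 0.  */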
12227 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12228 C1 is a valid shift constant, and C2 is a power of two, i.e. a single bit.  */
12230 if (TREE_CODE (arg0) == BIT_AND_EXPR
12231 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12232 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12234 && integer_pow2p (TREE_OPERAND (arg0, 1))
12235 && integer_zerop (arg1))
12237 tree itype = TREE_TYPE (arg0);
12238 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12239 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12241 /* Check for a valid shift count. */
12242 if (TREE_INT_CST_HIGH (arg001) == 0
12243 && TREE_INT_CST_LOW (arg001) < prec)
12245 tree arg01 = TREE_OPERAND (arg0, 1);
12246 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12247 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12248 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12249 can be rewritten as (X & (C2 << C1)) != 0. */
12250 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12252 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12253 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12254 return fold_build2_loc (loc, code, type, tem, arg1);
12256 /* Otherwise, for signed (arithmetic) shifts,
12257 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12258 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12259 else if (!TYPE_UNSIGNED (itype))
12260 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12261 arg000, build_int_cst (itype, 0));
12262 /* Otherwise, for unsigned (logical) shifts,
12263 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12264 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12266 return omit_one_operand_loc (loc, type,
12267 code == EQ_EXPR ? integer_one_node
12268 : integer_zero_node,
12273 /* If this is an NE comparison of zero with an AND of one, remove the
12274 comparison since the AND will give the correct value. */
12275 if (code == NE_EXPR
12276 && integer_zerop (arg1)
12277 && TREE_CODE (arg0) == BIT_AND_EXPR
12278 && integer_onep (TREE_OPERAND (arg0, 1)))
12279 return fold_convert_loc (loc, type, arg0);
12281 /* If we have (A & C) == C where C is a power of 2, convert this into
12282 (A & C) != 0. Similarly for NE_EXPR. */
12283 if (TREE_CODE (arg0) == BIT_AND_EXPR
12284 && integer_pow2p (TREE_OPERAND (arg0, 1))
12285 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12286 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12287 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12288 integer_zero_node));
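/* For example, (a & 8) == 8 folds to (a & 8) != 0.  */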
12290 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12291 bit, then fold the expression into A < 0 or A >= 0. */
12292 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12296 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12297 Similarly for NE_EXPR. */
12298 if (TREE_CODE (arg0) == BIT_AND_EXPR
12299 && TREE_CODE (arg1) == INTEGER_CST
12300 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12302 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12303 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12304 TREE_OPERAND (arg0, 1));
12305 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12307 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12308 if (integer_nonzerop (dandnotc))
12309 return omit_one_operand_loc (loc, type, rslt, arg0);
12312 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12313 Similarly for NE_EXPR. */
12314 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12315 && TREE_CODE (arg1) == INTEGER_CST
12316 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12318 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12319 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12320 TREE_OPERAND (arg0, 1), notd);
12321 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12322 if (integer_nonzerop (candnotd))
12323 return omit_one_operand_loc (loc, type, rslt, arg0);
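/* For example, (a & 3) == 4 and (a | 4) == 3 can never hold, so both
   fold to constant false (and the corresponding != forms to true).  */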
12326 /* If this is a comparison of a field, we may be able to simplify it. */
12327 if ((TREE_CODE (arg0) == COMPONENT_REF
12328 || TREE_CODE (arg0) == BIT_FIELD_REF)
12329 /* Handle the constant case even without -O
12330 to make sure the warnings are given. */
12331 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12333 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12338 /* Optimize comparisons of strlen vs zero to a compare of the
12339 first character of the string vs zero. To wit,
12340 strlen(ptr) == 0 => *ptr == 0
12341 strlen(ptr) != 0 => *ptr != 0
12342 Other cases should reduce to one of these two (or a constant)
12343 due to the return value of strlen being unsigned. */
12344 if (TREE_CODE (arg0) == CALL_EXPR
12345 && integer_zerop (arg1))
12347 tree fndecl = get_callee_fndecl (arg0);
12350 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12351 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12352 && call_expr_nargs (arg0) == 1
12353 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12355 tree iref = build_fold_indirect_ref_loc (loc,
12356 CALL_EXPR_ARG (arg0, 0));
12357 return fold_build2_loc (loc, code, type, iref,
12358 build_int_cst (TREE_TYPE (iref), 0));
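/* For example, strlen (p) == 0 folds to *p == 0.  */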
12362 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12363 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12364 if (TREE_CODE (arg0) == RSHIFT_EXPR
12365 && integer_zerop (arg1)
12366 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12368 tree arg00 = TREE_OPERAND (arg0, 0);
12369 tree arg01 = TREE_OPERAND (arg0, 1);
12370 tree itype = TREE_TYPE (arg00);
12371 if (TREE_INT_CST_HIGH (arg01) == 0
12372 && TREE_INT_CST_LOW (arg01)
12373 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12375 if (TYPE_UNSIGNED (itype))
12377 itype = signed_type_for (itype);
12378 arg00 = fold_convert_loc (loc, itype, arg00);
12380 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12381 type, arg00, build_int_cst (itype, 0));
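/* For example, with a 32-bit int x, (x >> 31) != 0 folds to x < 0 and
   (x >> 31) == 0 folds to x >= 0.  */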
12385 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12386 if (integer_zerop (arg1)
12387 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12388 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12389 TREE_OPERAND (arg0, 1));
12391 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12392 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12393 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12394 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12395 build_int_cst (TREE_TYPE (arg1), 0));
12396 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12397 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12398 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12399 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12400 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12401 build_int_cst (TREE_TYPE (arg1), 0));
12403 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12404 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12405 && TREE_CODE (arg1) == INTEGER_CST
12406 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12407 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12408 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12409 TREE_OPERAND (arg0, 1), arg1));
12411 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12412 (X & C) == 0 when C is a single bit. */
12413 if (TREE_CODE (arg0) == BIT_AND_EXPR
12414 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12415 && integer_zerop (arg1)
12416 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12418 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12419 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12420 TREE_OPERAND (arg0, 1));
12421 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12425 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12426 constant C is a power of two, i.e. a single bit. */
12427 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12428 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12429 && integer_zerop (arg1)
12430 && integer_pow2p (TREE_OPERAND (arg0, 1))
12431 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12432 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12434 tree arg00 = TREE_OPERAND (arg0, 0);
12435 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12436 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12439 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12440 when C is a power of two, i.e. a single bit. */
12441 if (TREE_CODE (arg0) == BIT_AND_EXPR
12442 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12443 && integer_zerop (arg1)
12444 && integer_pow2p (TREE_OPERAND (arg0, 1))
12445 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12446 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12448 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12449 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12450 arg000, TREE_OPERAND (arg0, 1));
12451 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12452 tem, build_int_cst (TREE_TYPE (tem), 0));
12455 if (integer_zerop (arg1)
12456 && tree_expr_nonzero_p (arg0))
12458 tree res = constant_boolean_node (code==NE_EXPR, type);
12459 return omit_one_operand_loc (loc, type, res, arg0);
12462 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12463 if (TREE_CODE (arg0) == NEGATE_EXPR
12464 && TREE_CODE (arg1) == NEGATE_EXPR)
12465 return fold_build2_loc (loc, code, type,
12466 TREE_OPERAND (arg0, 0),
12467 TREE_OPERAND (arg1, 0));
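/* For example, -x == -y folds to x == y.  */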
12469 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12470 if (TREE_CODE (arg0) == BIT_AND_EXPR
12471 && TREE_CODE (arg1) == BIT_AND_EXPR)
12473 tree arg00 = TREE_OPERAND (arg0, 0);
12474 tree arg01 = TREE_OPERAND (arg0, 1);
12475 tree arg10 = TREE_OPERAND (arg1, 0);
12476 tree arg11 = TREE_OPERAND (arg1, 1);
12477 tree itype = TREE_TYPE (arg0);
12479 if (operand_equal_p (arg01, arg11, 0))
12480 return fold_build2_loc (loc, code, type,
12481 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12482 fold_build2_loc (loc,
12483 BIT_XOR_EXPR, itype,
12486 build_int_cst (itype, 0));
12488 if (operand_equal_p (arg01, arg10, 0))
12489 return fold_build2_loc (loc, code, type,
12490 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12491 fold_build2_loc (loc,
12492 BIT_XOR_EXPR, itype,
12495 build_int_cst (itype, 0));
12497 if (operand_equal_p (arg00, arg11, 0))
12498 return fold_build2_loc (loc, code, type,
12499 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12500 fold_build2_loc (loc,
12501 BIT_XOR_EXPR, itype,
12504 build_int_cst (itype, 0));
12506 if (operand_equal_p (arg00, arg10, 0))
12507 return fold_build2_loc (loc, code, type,
12508 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12509 fold_build2_loc (loc,
12510 BIT_XOR_EXPR, itype,
12513 build_int_cst (itype, 0));
12516 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12517 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12519 tree arg00 = TREE_OPERAND (arg0, 0);
12520 tree arg01 = TREE_OPERAND (arg0, 1);
12521 tree arg10 = TREE_OPERAND (arg1, 0);
12522 tree arg11 = TREE_OPERAND (arg1, 1);
12523 tree itype = TREE_TYPE (arg0);
12525 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12526 operand_equal_p guarantees no side-effects so we don't need
12527 to use omit_one_operand on Z. */
12528 if (operand_equal_p (arg01, arg11, 0))
12529 return fold_build2_loc (loc, code, type, arg00, arg10);
12530 if (operand_equal_p (arg01, arg10, 0))
12531 return fold_build2_loc (loc, code, type, arg00, arg11);
12532 if (operand_equal_p (arg00, arg11, 0))
12533 return fold_build2_loc (loc, code, type, arg01, arg10);
12534 if (operand_equal_p (arg00, arg10, 0))
12535 return fold_build2_loc (loc, code, type, arg01, arg11);
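/* For example, (x ^ z) == (y ^ z) folds to x == y.  */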
12537 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12538 if (TREE_CODE (arg01) == INTEGER_CST
12539 && TREE_CODE (arg11) == INTEGER_CST)
12540 return fold_build2_loc (loc, code, type,
12541 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
12542 fold_build2_loc (loc,
12543 BIT_XOR_EXPR, itype,
12548 /* Attempt to simplify equality/inequality comparisons of complex
12549 values. Only lower the comparison if the result is known or
12550 can be simplified to a single scalar comparison. */
12551 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12552 || TREE_CODE (arg0) == COMPLEX_CST)
12553 && (TREE_CODE (arg1) == COMPLEX_EXPR
12554 || TREE_CODE (arg1) == COMPLEX_CST))
12556 tree real0, imag0, real1, imag1;
12559 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12561 real0 = TREE_OPERAND (arg0, 0);
12562 imag0 = TREE_OPERAND (arg0, 1);
12566 real0 = TREE_REALPART (arg0);
12567 imag0 = TREE_IMAGPART (arg0);
12570 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12572 real1 = TREE_OPERAND (arg1, 0);
12573 imag1 = TREE_OPERAND (arg1, 1);
12577 real1 = TREE_REALPART (arg1);
12578 imag1 = TREE_IMAGPART (arg1);
12581 rcond = fold_binary_loc (loc, code, type, real0, real1);
12582 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12584 if (integer_zerop (rcond))
12586 if (code == EQ_EXPR)
12587 return omit_two_operands_loc (loc, type, boolean_false_node,
12589 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12593 if (code == NE_EXPR)
12594 return omit_two_operands_loc (loc, type, boolean_true_node,
12596 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12600 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12601 if (icond && TREE_CODE (icond) == INTEGER_CST)
12603 if (integer_zerop (icond))
12605 if (code == EQ_EXPR)
12606 return omit_two_operands_loc (loc, type, boolean_false_node,
12608 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12612 if (code == NE_EXPR)
12613 return omit_two_operands_loc (loc, type, boolean_true_node,
12615 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12626 tem = fold_comparison (loc, code, type, op0, op1);
12627 if (tem != NULL_TREE)
12630 /* Transform comparisons of the form X +- C CMP X. */
12631 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12632 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12633 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12634 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12635 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12636 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12638 tree arg01 = TREE_OPERAND (arg0, 1);
12639 enum tree_code code0 = TREE_CODE (arg0);
12642 if (TREE_CODE (arg01) == REAL_CST)
12643 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12645 is_positive = tree_int_cst_sgn (arg01);
12647 /* (X - c) > X becomes false. */
12648 if (code == GT_EXPR
12649 && ((code0 == MINUS_EXPR && is_positive >= 0)
12650 || (code0 == PLUS_EXPR && is_positive <= 0)))
12652 if (TREE_CODE (arg01) == INTEGER_CST
12653 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12654 fold_overflow_warning (("assuming signed overflow does not "
12655 "occur when assuming that (X - c) > X "
12656 "is always false"),
12657 WARN_STRICT_OVERFLOW_ALL);
12658 return constant_boolean_node (0, type);
12661 /* Likewise (X + c) < X becomes false. */
12662 if (code == LT_EXPR
12663 && ((code0 == PLUS_EXPR && is_positive >= 0)
12664 || (code0 == MINUS_EXPR && is_positive <= 0)))
12666 if (TREE_CODE (arg01) == INTEGER_CST
12667 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12668 fold_overflow_warning (("assuming signed overflow does not "
12669 "occur when assuming that "
12670 "(X + c) < X is always false"),
12671 WARN_STRICT_OVERFLOW_ALL);
12672 return constant_boolean_node (0, type);
12675 /* Convert (X - c) <= X to true. */
12676 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12678 && ((code0 == MINUS_EXPR && is_positive >= 0)
12679 || (code0 == PLUS_EXPR && is_positive <= 0)))
12681 if (TREE_CODE (arg01) == INTEGER_CST
12682 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12683 fold_overflow_warning (("assuming signed overflow does not "
12684 "occur when assuming that "
12685 "(X - c) <= X is always true"),
12686 WARN_STRICT_OVERFLOW_ALL);
12687 return constant_boolean_node (1, type);
12690 /* Convert (X + c) >= X to true. */
12691 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12693 && ((code0 == PLUS_EXPR && is_positive >= 0)
12694 || (code0 == MINUS_EXPR && is_positive <= 0)))
12696 if (TREE_CODE (arg01) == INTEGER_CST
12697 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12698 fold_overflow_warning (("assuming signed overflow does not "
12699 "occur when assuming that "
12700 "(X + c) >= X is always true"),
12701 WARN_STRICT_OVERFLOW_ALL);
12702 return constant_boolean_node (1, type);
12705 if (TREE_CODE (arg01) == INTEGER_CST)
12707 /* Convert X + c > X and X - c < X to true for integers. */
12708 if (code == GT_EXPR
12709 && ((code0 == PLUS_EXPR && is_positive > 0)
12710 || (code0 == MINUS_EXPR && is_positive < 0)))
12712 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12713 fold_overflow_warning (("assuming signed overflow does "
12714 "not occur when assuming that "
12715 "(X + c) > X is always true"),
12716 WARN_STRICT_OVERFLOW_ALL);
12717 return constant_boolean_node (1, type);
12720 if (code == LT_EXPR
12721 && ((code0 == MINUS_EXPR && is_positive > 0)
12722 || (code0 == PLUS_EXPR && is_positive < 0)))
12724 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12725 fold_overflow_warning (("assuming signed overflow does "
12726 "not occur when assuming that "
12727 "(X - c) < X is always true"),
12728 WARN_STRICT_OVERFLOW_ALL);
12729 return constant_boolean_node (1, type);
12732 /* Convert X + c <= X and X - c >= X to false for integers. */
12733 if (code == LE_EXPR
12734 && ((code0 == PLUS_EXPR && is_positive > 0)
12735 || (code0 == MINUS_EXPR && is_positive < 0)))
12737 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12738 fold_overflow_warning (("assuming signed overflow does "
12739 "not occur when assuming that "
12740 "(X + c) <= X is always false"),
12741 WARN_STRICT_OVERFLOW_ALL);
12742 return constant_boolean_node (0, type);
12745 if (code == GE_EXPR
12746 && ((code0 == MINUS_EXPR && is_positive > 0)
12747 || (code0 == PLUS_EXPR && is_positive < 0)))
12749 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12750 fold_overflow_warning (("assuming signed overflow does "
12751 "not occur when assuming that "
12752 "(X - c) >= X is always false"),
12753 WARN_STRICT_OVERFLOW_ALL);
12754 return constant_boolean_node (0, type);
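/* For example, when signed overflow is undefined, x + 1 > x folds to
   true and x + 1 <= x folds to false, each guarded by the
   corresponding fold_overflow_warning.  */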
12759 /* Comparisons with the highest or lowest possible integer of
12760 the specified precision will have known values. */
12762 tree arg1_type = TREE_TYPE (arg1);
12763 unsigned int width = TYPE_PRECISION (arg1_type);
12765 if (TREE_CODE (arg1) == INTEGER_CST
12766 && width <= 2 * HOST_BITS_PER_WIDE_INT
12767 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12769 HOST_WIDE_INT signed_max_hi;
12770 unsigned HOST_WIDE_INT signed_max_lo;
12771 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12773 if (width <= HOST_BITS_PER_WIDE_INT)
12775 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12780 if (TYPE_UNSIGNED (arg1_type))
12782 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12788 max_lo = signed_max_lo;
12789 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12795 width -= HOST_BITS_PER_WIDE_INT;
12796 signed_max_lo = -1;
12797 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12802 if (TYPE_UNSIGNED (arg1_type))
12804 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12809 max_hi = signed_max_hi;
12810 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12814 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12815 && TREE_INT_CST_LOW (arg1) == max_lo)
12819 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12822 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12825 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12828 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12830 /* The GE_EXPR and LT_EXPR cases above are not normally
12831 reached because of previous transformations. */
12836 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12838 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12842 arg1 = const_binop (PLUS_EXPR, arg1,
12843 build_int_cst (TREE_TYPE (arg1), 1));
12844 return fold_build2_loc (loc, EQ_EXPR, type,
12845 fold_convert_loc (loc,
12846 TREE_TYPE (arg1), arg0),
12849 arg1 = const_binop (PLUS_EXPR, arg1,
12850 build_int_cst (TREE_TYPE (arg1), 1));
12851 return fold_build2_loc (loc, NE_EXPR, type,
12852 fold_convert_loc (loc, TREE_TYPE (arg1),
12858 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12860 && TREE_INT_CST_LOW (arg1) == min_lo)
12864 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12867 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12870 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12873 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12878 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12880 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12884 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12885 return fold_build2_loc (loc, NE_EXPR, type,
12886 fold_convert_loc (loc,
12887 TREE_TYPE (arg1), arg0),
12890 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12891 return fold_build2_loc (loc, EQ_EXPR, type,
12892 fold_convert_loc (loc, TREE_TYPE (arg1),
12899 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12900 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12901 && TYPE_UNSIGNED (arg1_type)
12902 /* We will flip the signedness of the comparison operator
12903 associated with the mode of arg1, so the sign bit is
12904 specified by this mode. Check that arg1 is the signed
12905 max associated with this sign bit. */
12906 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12907 /* signed_type does not work on pointer types. */
12908 && INTEGRAL_TYPE_P (arg1_type))
12910 /* The following case also applies to X < signed_max+1
12911 and X >= signed_max+1 because of previous transformations. */
12912 if (code == LE_EXPR || code == GT_EXPR)
12915 st = signed_type_for (TREE_TYPE (arg1));
12916 return fold_build2_loc (loc,
12917 code == LE_EXPR ? GE_EXPR : LT_EXPR,
12918 type, fold_convert_loc (loc, st, arg0),
12919 build_int_cst (st, 0));
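/* For example, with a 32-bit unsigned x, x <= 0x7fffffff folds to
   (int) x >= 0 and x > 0x7fffffff folds to (int) x < 0.  */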
12925 /* If we are comparing an ABS_EXPR with a constant, we can
12926 convert all the cases into explicit comparisons, but they may
12927 well not be faster than doing the ABS and one comparison.
12928 But ABS (X) <= C is a range comparison, which becomes a subtraction
12929 and a comparison, and is probably faster. */
12930 if (code == LE_EXPR
12931 && TREE_CODE (arg1) == INTEGER_CST
12932 && TREE_CODE (arg0) == ABS_EXPR
12933 && ! TREE_SIDE_EFFECTS (arg0)
12934 && (0 != (tem = negate_expr (arg1)))
12935 && TREE_CODE (tem) == INTEGER_CST
12936 && !TREE_OVERFLOW (tem))
12937 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12938 build2 (GE_EXPR, type,
12939 TREE_OPERAND (arg0, 0), tem),
12940 build2 (LE_EXPR, type,
12941 TREE_OPERAND (arg0, 0), arg1));
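/* For example, ABS_EXPR <x> <= 5 folds to x >= -5 && x <= 5.  */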
12943 /* Convert ABS_EXPR<x> >= 0 to true. */
12944 strict_overflow_p = false;
12945 if (code == GE_EXPR
12946 && (integer_zerop (arg1)
12947 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12948 && real_zerop (arg1)))
12949 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12951 if (strict_overflow_p)
12952 fold_overflow_warning (("assuming signed overflow does not occur "
12953 "when simplifying comparison of "
12954 "absolute value and zero"),
12955 WARN_STRICT_OVERFLOW_CONDITIONAL);
12956 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12959 /* Convert ABS_EXPR<x> < 0 to false. */
12960 strict_overflow_p = false;
12961 if (code == LT_EXPR
12962 && (integer_zerop (arg1) || real_zerop (arg1))
12963 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12965 if (strict_overflow_p)
12966 fold_overflow_warning (("assuming signed overflow does not occur "
12967 "when simplifying comparison of "
12968 "absolute value and zero"),
12969 WARN_STRICT_OVERFLOW_CONDITIONAL);
12970 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12973 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12974 and similarly for >= into !=. */
12975 if ((code == LT_EXPR || code == GE_EXPR)
12976 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12977 && TREE_CODE (arg1) == LSHIFT_EXPR
12978 && integer_onep (TREE_OPERAND (arg1, 0)))
12980 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12981 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12982 TREE_OPERAND (arg1, 1)),
12983 build_int_cst (TREE_TYPE (arg0), 0));
12984 goto fold_binary_exit;
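/* For example, with unsigned x, x < (1 << y) folds to (x >> y) == 0
   and x >= (1 << y) folds to (x >> y) != 0.  */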
12987 if ((code == LT_EXPR || code == GE_EXPR)
12988 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12989 && CONVERT_EXPR_P (arg1)
12990 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12991 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12993 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12994 fold_convert_loc (loc, TREE_TYPE (arg0),
12995 build2 (RSHIFT_EXPR,
12996 TREE_TYPE (arg0), arg0,
12997 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12999 build_int_cst (TREE_TYPE (arg0), 0));
13000 goto fold_binary_exit;
13005 case UNORDERED_EXPR:
13013 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13015 t1 = fold_relational_const (code, type, arg0, arg1);
13016 if (t1 != NULL_TREE)
13020 /* If the first operand is NaN, the result is constant. */
13021 if (TREE_CODE (arg0) == REAL_CST
13022 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13023 && (code != LTGT_EXPR || ! flag_trapping_math))
13025 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13026 ? integer_zero_node
13027 : integer_one_node;
13028 return omit_one_operand_loc (loc, type, t1, arg1);
13031 /* If the second operand is NaN, the result is constant. */
13032 if (TREE_CODE (arg1) == REAL_CST
13033 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13034 && (code != LTGT_EXPR || ! flag_trapping_math))
13036 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13037 ? integer_zero_node
13038 : integer_one_node;
13039 return omit_one_operand_loc (loc, type, t1, arg0);
13042 /* Simplify unordered comparison of something with itself. */
13043 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13044 && operand_equal_p (arg0, arg1, 0))
13045 return constant_boolean_node (1, type);
13047 if (code == LTGT_EXPR
13048 && !flag_trapping_math
13049 && operand_equal_p (arg0, arg1, 0))
13050 return constant_boolean_node (0, type);
13052 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13054 tree targ0 = strip_float_extensions (arg0);
13055 tree targ1 = strip_float_extensions (arg1);
13056 tree newtype = TREE_TYPE (targ0);
13058 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13059 newtype = TREE_TYPE (targ1);
13061 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13062 return fold_build2_loc (loc, code, type,
13063 fold_convert_loc (loc, newtype, targ0),
13064 fold_convert_loc (loc, newtype, targ1));
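/* For example, with floats a and b, (double) a < (double) b folds back
   to a < b, since the widening conversion cannot change the result.  */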
13069 case COMPOUND_EXPR:
13070 /* When pedantic, a compound expression can be neither an lvalue
13071 nor an integer constant expression. */
13072 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13074 /* Don't let (0, 0) be a null pointer constant. */
13075 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13076 : fold_convert_loc (loc, type, arg1);
13077 return pedantic_non_lvalue_loc (loc, tem);
13080 if ((TREE_CODE (arg0) == REAL_CST
13081 && TREE_CODE (arg1) == REAL_CST)
13082 || (TREE_CODE (arg0) == INTEGER_CST
13083 && TREE_CODE (arg1) == INTEGER_CST))
13084 return build_complex (type, arg0, arg1);
13088 /* An ASSERT_EXPR should never be passed to fold_binary. */
13089 gcc_unreachable ();
13093 } /* switch (code) */
13095 protected_set_expr_location (tem, loc);
13099 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13100 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees of a GOTO_EXPR.  */
13104 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13106 switch (TREE_CODE (*tp))
13112 *walk_subtrees = 0;
13114 /* ... fall through ... */
13121 /* Return whether the sub-tree ST contains a label which is accessible from
13122 outside the sub-tree. */
13125 contains_label_p (tree st)
13128 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13131 /* Fold a ternary expression of code CODE and type TYPE with operands
13132 OP0, OP1, and OP2. Return the folded expression if folding is
13133 successful. Otherwise, return NULL_TREE. */
13136 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13137 tree op0, tree op1, tree op2)
13140 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13141 enum tree_code_class kind = TREE_CODE_CLASS (code);
13143 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13144 && TREE_CODE_LENGTH (code) == 3);
13146 /* Strip any conversions that don't change the mode. This is safe
13147 for every expression, except for a comparison expression because
13148 its signedness is derived from its operands. So, in the latter
13149 case, only strip conversions that don't change the signedness.
13151 Note that this is done as an internal manipulation within the
13152 constant folder, in order to find the simplest representation of
13153 the arguments so that their form can be studied. In any case,
13154 the appropriate type conversions should be put back in the tree
13155 that will get out of the constant folder. */
13170 case COMPONENT_REF:
13171 if (TREE_CODE (arg0) == CONSTRUCTOR
13172 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13174 unsigned HOST_WIDE_INT idx;
13176 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13183 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13184 so all simple results must be passed through pedantic_non_lvalue. */
13185 if (TREE_CODE (arg0) == INTEGER_CST)
13187 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13188 tem = integer_zerop (arg0) ? op2 : op1;
13189 /* Only optimize constant conditions when the selected branch
13190 has the same type as the COND_EXPR. This avoids optimizing
13191 away "c ? x : throw", where the throw has a void type.
13192 Avoid throwing away the operand which contains a label. */
13193 if ((!TREE_SIDE_EFFECTS (unused_op)
13194 || !contains_label_p (unused_op))
13195 && (! VOID_TYPE_P (TREE_TYPE (tem))
13196 || VOID_TYPE_P (type)))
13197 return pedantic_non_lvalue_loc (loc, tem);
13200 if (operand_equal_p (arg1, op2, 0))
13201 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13203 /* If we have A op B ? A : C, we may be able to convert this to a
13204 simpler expression, depending on the operation and the values
13205 of B and C. Signed zeros prevent all of these transformations,
13206 for reasons given above each one.
13208 Also try swapping the arguments and inverting the conditional. */
13209 if (COMPARISON_CLASS_P (arg0)
13210 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13211 arg1, TREE_OPERAND (arg0, 1))
13212 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13214 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13219 if (COMPARISON_CLASS_P (arg0)
13220 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13222 TREE_OPERAND (arg0, 1))
13223 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13225 tem = fold_truth_not_expr (loc, arg0);
13226 if (tem && COMPARISON_CLASS_P (tem))
13228 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13234 /* If the second operand is simpler than the third, swap them
13235 since that produces better jump optimization results. */
13236 if (truth_value_p (TREE_CODE (arg0))
13237 && tree_swap_operands_p (op1, op2, false))
13239 /* See if this can be inverted. If it can't, possibly because
13240 it was a floating-point inequality comparison, don't do anything.  */
13242 tem = fold_truth_not_expr (loc, arg0);
13244 return fold_build3_loc (loc, code, type, tem, op2, op1);
13247 /* Convert A ? 1 : 0 to simply A. */
13248 if (integer_onep (op1)
13249 && integer_zerop (op2)
13250 /* If we try to convert OP0 to our type, the
13251 call to fold will try to move the conversion inside
13252 a COND, which will recurse. In that case, the COND_EXPR
13253 is probably the best choice, so leave it alone. */
13254 && type == TREE_TYPE (arg0))
13255 return pedantic_non_lvalue_loc (loc, arg0);
13257 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13258 over COND_EXPR in cases such as floating point comparisons. */
13259 if (integer_zerop (op1)
13260 && integer_onep (op2)
13261 && truth_value_p (TREE_CODE (arg0)))
13262 return pedantic_non_lvalue_loc (loc,
13263 fold_convert_loc (loc, type,
13264 invert_truthvalue_loc (loc,
13267 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13268 if (TREE_CODE (arg0) == LT_EXPR
13269 && integer_zerop (TREE_OPERAND (arg0, 1))
13270 && integer_zerop (op2)
13271 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13273 /* sign_bit_p only checks ARG1 bits within A's precision.
13274 If <sign bit of A> has wider type than A, bits outside
13275 of A's precision in <sign bit of A> need to be checked.
13276 If they are all 0, this optimization needs to be done
13277 in unsigned A's type; if they are all 1, in signed A's type;
13278 otherwise this can't be done. */
13279 if (TYPE_PRECISION (TREE_TYPE (tem))
13280 < TYPE_PRECISION (TREE_TYPE (arg1))
13281 && TYPE_PRECISION (TREE_TYPE (tem))
13282 < TYPE_PRECISION (type))
13284 unsigned HOST_WIDE_INT mask_lo;
13285 HOST_WIDE_INT mask_hi;
13286 int inner_width, outer_width;
13289 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13290 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13291 if (outer_width > TYPE_PRECISION (type))
13292 outer_width = TYPE_PRECISION (type);
13294 if (outer_width > HOST_BITS_PER_WIDE_INT)
13296 mask_hi = ((unsigned HOST_WIDE_INT) -1
13297 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13303 mask_lo = ((unsigned HOST_WIDE_INT) -1
13304 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13306 if (inner_width > HOST_BITS_PER_WIDE_INT)
13308 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13309 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13313 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13314 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13316 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13317 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13319 tem_type = signed_type_for (TREE_TYPE (tem));
13320 tem = fold_convert_loc (loc, tem_type, tem);
13322 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13323 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13325 tem_type = unsigned_type_for (TREE_TYPE (tem));
13326 tem = fold_convert_loc (loc, tem_type, tem);
13334 fold_convert_loc (loc, type,
13335 fold_build2_loc (loc, BIT_AND_EXPR,
13336 TREE_TYPE (tem), tem,
13337 fold_convert_loc (loc,
13342 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13343 already handled above. */
13344 if (TREE_CODE (arg0) == BIT_AND_EXPR
13345 && integer_onep (TREE_OPERAND (arg0, 1))
13346 && integer_zerop (op2)
13347 && integer_pow2p (arg1))
13349 tree tem = TREE_OPERAND (arg0, 0);
13351 if (TREE_CODE (tem) == RSHIFT_EXPR
13352 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13353 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13354 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13355 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13356 TREE_OPERAND (tem, 0), arg1);
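/* For example, ((a >> 3) & 1) ? 8 : 0 folds to a & 8.  */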
13359 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13360 is probably obsolete because the first operand should be a
13361 truth value (that's why we have the two cases above), but let's
13362 leave it in until we can confirm this for all front-ends. */
13363 if (integer_zerop (op2)
13364 && TREE_CODE (arg0) == NE_EXPR
13365 && integer_zerop (TREE_OPERAND (arg0, 1))
13366 && integer_pow2p (arg1)
13367 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13368 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13369 arg1, OEP_ONLY_CONST))
13370 return pedantic_non_lvalue_loc (loc,
13371 fold_convert_loc (loc, type,
13372 TREE_OPERAND (arg0, 0)));
13374 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13375 if (integer_zerop (op2)
13376 && truth_value_p (TREE_CODE (arg0))
13377 && truth_value_p (TREE_CODE (arg1)))
13378 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13379 fold_convert_loc (loc, type, arg0),
13382 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13383 if (integer_onep (op2)
13384 && truth_value_p (TREE_CODE (arg0))
13385 && truth_value_p (TREE_CODE (arg1)))
13387 /* Only perform transformation if ARG0 is easily inverted. */
13388 tem = fold_truth_not_expr (loc, arg0);
13390 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13391 fold_convert_loc (loc, type, tem),
13395 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13396 if (integer_zerop (arg1)
13397 && truth_value_p (TREE_CODE (arg0))
13398 && truth_value_p (TREE_CODE (op2)))
13400 /* Only perform transformation if ARG0 is easily inverted. */
13401 tem = fold_truth_not_expr (loc, arg0);
13403 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13404 fold_convert_loc (loc, type, tem),
13408 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13409 if (integer_onep (arg1)
13410 && truth_value_p (TREE_CODE (arg0))
13411 && truth_value_p (TREE_CODE (op2)))
13412 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13413 fold_convert_loc (loc, type, arg0),
13419 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13420 of fold_ternary on them. */
13421 gcc_unreachable ();
13423 case BIT_FIELD_REF:
13424 if ((TREE_CODE (arg0) == VECTOR_CST
13425 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13426 && type == TREE_TYPE (TREE_TYPE (arg0)))
13428 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13429 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13432 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13433 && (idx % width) == 0
13434 && (idx = idx / width)
13435 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13437 tree elements = NULL_TREE;
13439 if (TREE_CODE (arg0) == VECTOR_CST)
13440 elements = TREE_VECTOR_CST_ELTS (arg0);
13443 unsigned HOST_WIDE_INT idx;
13446 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13447 elements = tree_cons (NULL_TREE, value, elements);
13449 while (idx-- > 0 && elements)
13450 elements = TREE_CHAIN (elements);
13452 return TREE_VALUE (elements);
13454 return fold_convert_loc (loc, type, integer_zero_node);
13458 /* A bit-field-ref that referenced the full argument can be stripped. */
13459 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13460 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13461 && integer_zerop (op2))
13462 return fold_convert_loc (loc, type, arg0);
13468 } /* switch (code) */
13471 /* Perform constant folding and related simplification of EXPR.
13472 The related simplifications include x*1 => x, x*0 => 0, etc.,
13473 and application of the associative law.
13474 NOP_EXPR conversions may be removed freely (as long as we
13475 are careful not to change the type of the overall expression).
13476 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13477 but we can constant-fold them if they have constant operands. */
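/* A minimal usage sketch (illustrative only; X stands for some
   previously built tree): a caller may build a tree and immediately
   fold it, e.g.

     tree sum = build2 (PLUS_EXPR, integer_type_node, x, integer_zero_node);
     sum = fold (sum);

   after which SUM is typically just X again, since x + 0, x * 1 and
   fully constant operands are among the simplifications described
   above.  */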
13479 #ifdef ENABLE_FOLD_CHECKING
13480 # define fold(x) fold_1 (x)
13481 static tree fold_1 (tree);
13487 const tree t = expr;
13488 enum tree_code code = TREE_CODE (t);
13489 enum tree_code_class kind = TREE_CODE_CLASS (code);
13491 location_t loc = EXPR_LOCATION (expr);
13493 /* Return right away if a constant. */
13494 if (kind == tcc_constant)
13497 /* CALL_EXPR-like objects with variable numbers of operands are
13498 treated specially. */
13499 if (kind == tcc_vl_exp)
13501 if (code == CALL_EXPR)
13503 tem = fold_call_expr (loc, expr, false);
13504 return tem ? tem : expr;
13509 if (IS_EXPR_CODE_CLASS (kind))
13511 tree type = TREE_TYPE (t);
13512 tree op0, op1, op2;
13514 switch (TREE_CODE_LENGTH (code))
13517 op0 = TREE_OPERAND (t, 0);
13518 tem = fold_unary_loc (loc, code, type, op0);
13519 return tem ? tem : expr;
13521 op0 = TREE_OPERAND (t, 0);
13522 op1 = TREE_OPERAND (t, 1);
13523 tem = fold_binary_loc (loc, code, type, op0, op1);
13524 return tem ? tem : expr;
13526 op0 = TREE_OPERAND (t, 0);
13527 op1 = TREE_OPERAND (t, 1);
13528 op2 = TREE_OPERAND (t, 2);
13529 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13530 return tem ? tem : expr;
13540 tree op0 = TREE_OPERAND (t, 0);
13541 tree op1 = TREE_OPERAND (t, 1);
13543 if (TREE_CODE (op1) == INTEGER_CST
13544 && TREE_CODE (op0) == CONSTRUCTOR
13545 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13547 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13548 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13549 unsigned HOST_WIDE_INT begin = 0;
13551 /* Find a matching index by means of a binary search. */
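	    /* (BEGIN and END delimit the half-open range of elements still
	       under consideration; e.g. with four elements the first probe
	       is at index 2, after which the range shrinks to [0,2) or
	       [3,4) depending on the comparison.)  */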
13552 while (begin != end)
13554 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13555 tree index = VEC_index (constructor_elt, elts, middle)->index;
13557 if (TREE_CODE (index) == INTEGER_CST
13558 && tree_int_cst_lt (index, op1))
13559 begin = middle + 1;
13560 else if (TREE_CODE (index) == INTEGER_CST
13561 && tree_int_cst_lt (op1, index))
13563 else if (TREE_CODE (index) == RANGE_EXPR
13564 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13565 begin = middle + 1;
13566 else if (TREE_CODE (index) == RANGE_EXPR
13567 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13570 return VEC_index (constructor_elt, elts, middle)->value;
13578 return fold (DECL_INITIAL (t));
13582 } /* switch (code) */
13585 #ifdef ENABLE_FOLD_CHECKING
13588 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13589 static void fold_check_failed (const_tree, const_tree);
13590 void print_fold_checksum (const_tree);
 13592 /* When --enable-checking=fold, compute a digest of expr before
 13593    and after the actual fold call, to verify that fold did not
 13594    accidentally change the original expr.  */
13600 struct md5_ctx ctx;
13601 unsigned char checksum_before[16], checksum_after[16];
13604 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13605 md5_init_ctx (&ctx);
13606 fold_checksum_tree (expr, &ctx, ht);
13607 md5_finish_ctx (&ctx, checksum_before);
13610 ret = fold_1 (expr);
13612 md5_init_ctx (&ctx);
13613 fold_checksum_tree (expr, &ctx, ht);
13614 md5_finish_ctx (&ctx, checksum_after);
13617 if (memcmp (checksum_before, checksum_after, 16))
13618 fold_check_failed (expr, ret);
13624 print_fold_checksum (const_tree expr)
13626 struct md5_ctx ctx;
13627 unsigned char checksum[16], cnt;
13630 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13631 md5_init_ctx (&ctx);
13632 fold_checksum_tree (expr, &ctx, ht);
13633 md5_finish_ctx (&ctx, checksum);
13635 for (cnt = 0; cnt < 16; ++cnt)
13636 fprintf (stderr, "%02x", checksum[cnt]);
13637 putc ('\n', stderr);
13641 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13643 internal_error ("fold check: original tree changed by fold");
13647 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13650 enum tree_code code;
13651 union tree_node buf;
13656 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13657 <= sizeof (struct tree_function_decl))
13658 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13661 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13665 code = TREE_CODE (expr);
13666 if (TREE_CODE_CLASS (code) == tcc_declaration
13667 && DECL_ASSEMBLER_NAME_SET_P (expr))
13669 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13670 memcpy ((char *) &buf, expr, tree_size (expr));
13671 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13672 expr = (tree) &buf;
13674 else if (TREE_CODE_CLASS (code) == tcc_type
13675 && (TYPE_POINTER_TO (expr)
13676 || TYPE_REFERENCE_TO (expr)
13677 || TYPE_CACHED_VALUES_P (expr)
13678 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13679 || TYPE_NEXT_VARIANT (expr)))
13681 /* Allow these fields to be modified. */
13683 memcpy ((char *) &buf, expr, tree_size (expr));
13684 expr = tmp = (tree) &buf;
13685 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13686 TYPE_POINTER_TO (tmp) = NULL;
13687 TYPE_REFERENCE_TO (tmp) = NULL;
13688 TYPE_NEXT_VARIANT (tmp) = NULL;
13689 if (TYPE_CACHED_VALUES_P (tmp))
13691 TYPE_CACHED_VALUES_P (tmp) = 0;
13692 TYPE_CACHED_VALUES (tmp) = NULL;
13695 md5_process_bytes (expr, tree_size (expr), ctx);
13696 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13697 if (TREE_CODE_CLASS (code) != tcc_type
13698 && TREE_CODE_CLASS (code) != tcc_declaration
13699 && code != TREE_LIST
13700 && code != SSA_NAME)
13701 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13702 switch (TREE_CODE_CLASS (code))
13708 md5_process_bytes (TREE_STRING_POINTER (expr),
13709 TREE_STRING_LENGTH (expr), ctx);
13712 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13713 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13716 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13722 case tcc_exceptional:
13726 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13727 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13728 expr = TREE_CHAIN (expr);
13729 goto recursive_label;
13732 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13733 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13739 case tcc_expression:
13740 case tcc_reference:
13741 case tcc_comparison:
13744 case tcc_statement:
13746 len = TREE_OPERAND_LENGTH (expr);
13747 for (i = 0; i < len; ++i)
13748 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13750 case tcc_declaration:
13751 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13752 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13753 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13755 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13756 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13757 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13758 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13759 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13761 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13762 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13764 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13766 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13767 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13768 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13772 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13773 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13774 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13775 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13776 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13777 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13778 if (INTEGRAL_TYPE_P (expr)
13779 || SCALAR_FLOAT_TYPE_P (expr))
13781 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13782 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13784 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13785 if (TREE_CODE (expr) == RECORD_TYPE
13786 || TREE_CODE (expr) == UNION_TYPE
13787 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13788 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13789 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13796 /* Helper function for outputting the checksum of a tree T. When
13797 debugging with gdb, you can "define mynext" to be "next" followed
13798 by "call debug_fold_checksum (op0)", then just trace down till the
13801 DEBUG_FUNCTION void
13802 debug_fold_checksum (const_tree t)
13805 unsigned char checksum[16];
13806 struct md5_ctx ctx;
13807 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13809 md5_init_ctx (&ctx);
13810 fold_checksum_tree (t, &ctx, ht);
13811 md5_finish_ctx (&ctx, checksum);
13814 for (i = 0; i < 16; i++)
13815 fprintf (stderr, "%d ", checksum[i]);
13817 fprintf (stderr, "\n");
13822 /* Fold a unary tree expression with code CODE of type TYPE with an
13823 operand OP0. LOC is the location of the resulting expression.
13824 Return a folded expression if successful. Otherwise, return a tree
13825 expression with code CODE of type TYPE with an operand OP0. */
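/* For example (an illustrative call; X stands for some previously built
   tree):

     tree neg = fold_build1_loc (loc, NEGATE_EXPR, type, x);

   which either returns a simplified tree (e.g. the negation of an
   INTEGER_CST is computed immediately) or falls back to building a plain
   NEGATE_EXPR node carrying location LOC.  */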
13828 fold_build1_stat_loc (location_t loc,
13829 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13832 #ifdef ENABLE_FOLD_CHECKING
13833 unsigned char checksum_before[16], checksum_after[16];
13834 struct md5_ctx ctx;
13837 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13838 md5_init_ctx (&ctx);
13839 fold_checksum_tree (op0, &ctx, ht);
13840 md5_finish_ctx (&ctx, checksum_before);
13844 tem = fold_unary_loc (loc, code, type, op0);
13847 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13848 SET_EXPR_LOCATION (tem, loc);
13851 #ifdef ENABLE_FOLD_CHECKING
13852 md5_init_ctx (&ctx);
13853 fold_checksum_tree (op0, &ctx, ht);
13854 md5_finish_ctx (&ctx, checksum_after);
13857 if (memcmp (checksum_before, checksum_after, 16))
13858 fold_check_failed (op0, tem);
13863 /* Fold a binary tree expression with code CODE of type TYPE with
13864 operands OP0 and OP1. LOC is the location of the resulting
13865 expression. Return a folded expression if successful. Otherwise,
 13866    return a tree expression with code CODE of type TYPE with operands
	  OP0 and OP1.  */
13870 fold_build2_stat_loc (location_t loc,
13871 enum tree_code code, tree type, tree op0, tree op1
13875 #ifdef ENABLE_FOLD_CHECKING
13876 unsigned char checksum_before_op0[16],
13877 checksum_before_op1[16],
13878 checksum_after_op0[16],
13879 checksum_after_op1[16];
13880 struct md5_ctx ctx;
13883 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13884 md5_init_ctx (&ctx);
13885 fold_checksum_tree (op0, &ctx, ht);
13886 md5_finish_ctx (&ctx, checksum_before_op0);
13889 md5_init_ctx (&ctx);
13890 fold_checksum_tree (op1, &ctx, ht);
13891 md5_finish_ctx (&ctx, checksum_before_op1);
13895 tem = fold_binary_loc (loc, code, type, op0, op1);
13898 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13899 SET_EXPR_LOCATION (tem, loc);
13902 #ifdef ENABLE_FOLD_CHECKING
13903 md5_init_ctx (&ctx);
13904 fold_checksum_tree (op0, &ctx, ht);
13905 md5_finish_ctx (&ctx, checksum_after_op0);
13908 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13909 fold_check_failed (op0, tem);
13911 md5_init_ctx (&ctx);
13912 fold_checksum_tree (op1, &ctx, ht);
13913 md5_finish_ctx (&ctx, checksum_after_op1);
13916 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13917 fold_check_failed (op1, tem);
13922 /* Fold a ternary tree expression with code CODE of type TYPE with
13923 operands OP0, OP1, and OP2. Return a folded expression if
13924 successful. Otherwise, return a tree expression with code CODE of
13925 type TYPE with operands OP0, OP1, and OP2. */
13928 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
13929 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13932 #ifdef ENABLE_FOLD_CHECKING
13933 unsigned char checksum_before_op0[16],
13934 checksum_before_op1[16],
13935 checksum_before_op2[16],
13936 checksum_after_op0[16],
13937 checksum_after_op1[16],
13938 checksum_after_op2[16];
13939 struct md5_ctx ctx;
13942 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13943 md5_init_ctx (&ctx);
13944 fold_checksum_tree (op0, &ctx, ht);
13945 md5_finish_ctx (&ctx, checksum_before_op0);
13948 md5_init_ctx (&ctx);
13949 fold_checksum_tree (op1, &ctx, ht);
13950 md5_finish_ctx (&ctx, checksum_before_op1);
13953 md5_init_ctx (&ctx);
13954 fold_checksum_tree (op2, &ctx, ht);
13955 md5_finish_ctx (&ctx, checksum_before_op2);
13959 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13960 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13963 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13964 SET_EXPR_LOCATION (tem, loc);
13967 #ifdef ENABLE_FOLD_CHECKING
13968 md5_init_ctx (&ctx);
13969 fold_checksum_tree (op0, &ctx, ht);
13970 md5_finish_ctx (&ctx, checksum_after_op0);
13973 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13974 fold_check_failed (op0, tem);
13976 md5_init_ctx (&ctx);
13977 fold_checksum_tree (op1, &ctx, ht);
13978 md5_finish_ctx (&ctx, checksum_after_op1);
13981 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13982 fold_check_failed (op1, tem);
13984 md5_init_ctx (&ctx);
13985 fold_checksum_tree (op2, &ctx, ht);
13986 md5_finish_ctx (&ctx, checksum_after_op2);
13989 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13990 fold_check_failed (op2, tem);
 13995 /* Fold a CALL_EXPR expression of type TYPE whose callee is FN and whose
 13996    NARGS arguments are given in ARGARRAY, with a null static chain.
13997 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13998 of type TYPE from the given operands as constructed by build_call_array. */
14001 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14002 int nargs, tree *argarray)
14005 #ifdef ENABLE_FOLD_CHECKING
14006 unsigned char checksum_before_fn[16],
14007 checksum_before_arglist[16],
14008 checksum_after_fn[16],
14009 checksum_after_arglist[16];
14010 struct md5_ctx ctx;
14014 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14015 md5_init_ctx (&ctx);
14016 fold_checksum_tree (fn, &ctx, ht);
14017 md5_finish_ctx (&ctx, checksum_before_fn);
14020 md5_init_ctx (&ctx);
14021 for (i = 0; i < nargs; i++)
14022 fold_checksum_tree (argarray[i], &ctx, ht);
14023 md5_finish_ctx (&ctx, checksum_before_arglist);
14027 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14029 #ifdef ENABLE_FOLD_CHECKING
14030 md5_init_ctx (&ctx);
14031 fold_checksum_tree (fn, &ctx, ht);
14032 md5_finish_ctx (&ctx, checksum_after_fn);
14035 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14036 fold_check_failed (fn, tem);
14038 md5_init_ctx (&ctx);
14039 for (i = 0; i < nargs; i++)
14040 fold_checksum_tree (argarray[i], &ctx, ht);
14041 md5_finish_ctx (&ctx, checksum_after_arglist);
14044 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14045 fold_check_failed (NULL_TREE, tem);
14050 /* Perform constant folding and related simplification of initializer
14051 expression EXPR. These behave identically to "fold_buildN" but ignore
14052 potential run-time traps and exceptions that fold must preserve. */
14054 #define START_FOLD_INIT \
14055 int saved_signaling_nans = flag_signaling_nans;\
14056 int saved_trapping_math = flag_trapping_math;\
14057 int saved_rounding_math = flag_rounding_math;\
14058 int saved_trapv = flag_trapv;\
14059 int saved_folding_initializer = folding_initializer;\
14060 flag_signaling_nans = 0;\
14061 flag_trapping_math = 0;\
14062 flag_rounding_math = 0;\
14064 folding_initializer = 1;
14066 #define END_FOLD_INIT \
14067 flag_signaling_nans = saved_signaling_nans;\
14068 flag_trapping_math = saved_trapping_math;\
14069 flag_rounding_math = saved_rounding_math;\
14070 flag_trapv = saved_trapv;\
14071 folding_initializer = saved_folding_initializer;
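/* Sketch of how the wrappers below use these two macros (the pattern each
   of them follows):

     tree
     fold_build2_initializer_loc (location_t loc, enum tree_code code,
				  tree type, tree op0, tree op1)
     {
       tree result;
       START_FOLD_INIT;
       result = fold_build2_loc (loc, code, type, op0, op1);
       END_FOLD_INIT;
       return result;
     }

   i.e. the trapping/rounding flags are cleared, the ordinary entry point
   is called, and the saved flag values are then restored.  */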
14074 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14075 tree type, tree op)
14080 result = fold_build1_loc (loc, code, type, op);
14087 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14088 tree type, tree op0, tree op1)
14093 result = fold_build2_loc (loc, code, type, op0, op1);
14100 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14101 tree type, tree op0, tree op1, tree op2)
14106 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14113 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14114 int nargs, tree *argarray)
14119 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14125 #undef START_FOLD_INIT
14126 #undef END_FOLD_INIT
14128 /* Determine if first argument is a multiple of second argument. Return 0 if
 14129    it is not, or we cannot easily determine it to be.
14131 An example of the sort of thing we care about (at this point; this routine
14132 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14133 fold cases do now) is discovering that
14135 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
 14141    is a multiple of SAVE_EXPR (J * 8), when we know that the two
	  SAVE_EXPR (J * 8) nodes are the same node.
14143 This code also handles discovering that
14145 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14147 is a multiple of 8 so we don't have to worry about dealing with a
14148 possible remainder.
14150 Note that we *look* inside a SAVE_EXPR only to determine how it was
14151 calculated; it is not safe for fold to do much of anything else with the
14152 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14153 at run time. For example, the latter example above *cannot* be implemented
14154 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14155 evaluation time of the original SAVE_EXPR is not necessarily the same at
14156 the time the new expression is evaluated. The only optimization of this
14157 sort that would be valid is changing
14159 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
 14163    divided by 8 into SAVE_EXPR (I) * SAVE_EXPR (J)
14165 (where the same SAVE_EXPR (J) is used in the original and the
14166 transformed version). */
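/* Two small concrete cases (illustrative; I_TIMES_4 stands for a
   MULT_EXPR of some variable by 4):

     multiple_of_p (sizetype, size_int (24), size_int (8))  returns 1
     multiple_of_p (sizetype, i_times_4,     size_int (8))  returns 0

   24 is provably a multiple of 8, whereas i * 4 cannot be shown to be
   one, so the conservative answer 0 is given.  */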
14169 multiple_of_p (tree type, const_tree top, const_tree bottom)
14171 if (operand_equal_p (top, bottom, 0))
14174 if (TREE_CODE (type) != INTEGER_TYPE)
14177 switch (TREE_CODE (top))
14180 /* Bitwise and provides a power of two multiple. If the mask is
14181 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
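      /* (E.g. x & 24 is a multiple of 8: the result can only be 0, 8, 16
	 or 24, because the mask 24 is itself a multiple of 8.)  */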
14182 if (!integer_pow2p (bottom))
14187 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14188 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14192 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14193 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14196 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14200 op1 = TREE_OPERAND (top, 1);
14201 /* const_binop may not detect overflow correctly,
14202 so check for it explicitly here. */
14203 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14204 > TREE_INT_CST_LOW (op1)
14205 && TREE_INT_CST_HIGH (op1) == 0
14206 && 0 != (t1 = fold_convert (type,
14207 const_binop (LSHIFT_EXPR,
14210 && !TREE_OVERFLOW (t1))
14211 return multiple_of_p (type, t1, bottom);
14216 /* Can't handle conversions from non-integral or wider integral type. */
14217 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14218 || (TYPE_PRECISION (type)
14219 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14222 /* .. fall through ... */
14225 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14228 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14229 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14232 if (TREE_CODE (bottom) != INTEGER_CST
14233 || integer_zerop (bottom)
14234 || (TYPE_UNSIGNED (type)
14235 && (tree_int_cst_sgn (top) < 0
14236 || tree_int_cst_sgn (bottom) < 0)))
14238 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14246 /* Return true if CODE or TYPE is known to be non-negative. */
14249 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14251 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14252 && truth_value_p (code))
14253 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14254 have a signed:1 type (where the value is -1 and 0). */
14259 /* Return true if (CODE OP0) is known to be non-negative. If the return
14260 value is based on the assumption that signed overflow is undefined,
14261 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14262 *STRICT_OVERFLOW_P. */
14265 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14266 bool *strict_overflow_p)
14268 if (TYPE_UNSIGNED (type))
14274 /* We can't return 1 if flag_wrapv is set because
14275 ABS_EXPR<INT_MIN> = INT_MIN. */
14276 if (!INTEGRAL_TYPE_P (type))
14278 if (TYPE_OVERFLOW_UNDEFINED (type))
14280 *strict_overflow_p = true;
14285 case NON_LVALUE_EXPR:
14287 case FIX_TRUNC_EXPR:
14288 return tree_expr_nonnegative_warnv_p (op0,
14289 strict_overflow_p);
14293 tree inner_type = TREE_TYPE (op0);
14294 tree outer_type = type;
14296 if (TREE_CODE (outer_type) == REAL_TYPE)
14298 if (TREE_CODE (inner_type) == REAL_TYPE)
14299 return tree_expr_nonnegative_warnv_p (op0,
14300 strict_overflow_p);
14301 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14303 if (TYPE_UNSIGNED (inner_type))
14305 return tree_expr_nonnegative_warnv_p (op0,
14306 strict_overflow_p);
14309 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14311 if (TREE_CODE (inner_type) == REAL_TYPE)
14312 return tree_expr_nonnegative_warnv_p (op0,
14313 strict_overflow_p);
14314 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14315 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14316 && TYPE_UNSIGNED (inner_type);
14322 return tree_simple_nonnegative_warnv_p (code, type);
14325 /* We don't know sign of `t', so be conservative and return false. */
14329 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14330 value is based on the assumption that signed overflow is undefined,
14331 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14332 *STRICT_OVERFLOW_P. */
14335 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14336 tree op1, bool *strict_overflow_p)
14338 if (TYPE_UNSIGNED (type))
14343 case POINTER_PLUS_EXPR:
14345 if (FLOAT_TYPE_P (type))
14346 return (tree_expr_nonnegative_warnv_p (op0,
14348 && tree_expr_nonnegative_warnv_p (op1,
14349 strict_overflow_p));
14351 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14352 both unsigned and at least 2 bits shorter than the result. */
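      /* (Illustrative instance: two 8-bit unsigned values added in a 32-bit
	 signed result total at most 255 + 255 = 510, which needs only 9
	 bits, so the sum is certainly non-negative.)  */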
14353 if (TREE_CODE (type) == INTEGER_TYPE
14354 && TREE_CODE (op0) == NOP_EXPR
14355 && TREE_CODE (op1) == NOP_EXPR)
14357 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14358 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14359 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14360 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14362 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14363 TYPE_PRECISION (inner2)) + 1;
14364 return prec < TYPE_PRECISION (type);
14370 if (FLOAT_TYPE_P (type))
14372 /* x * x for floating point x is always non-negative. */
14373 if (operand_equal_p (op0, op1, 0))
14375 return (tree_expr_nonnegative_warnv_p (op0,
14377 && tree_expr_nonnegative_warnv_p (op1,
14378 strict_overflow_p));
14381 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
 14382    both unsigned and the sum of their bit widths is less than that of
	  the result.  */
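      /* (Illustrative instance: two 8-bit unsigned values multiplied in a
	 32-bit signed result give at most 255 * 255 = 65025, which fits in
	 16 bits, so the product is certainly non-negative.)  */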
14383 if (TREE_CODE (type) == INTEGER_TYPE
14384 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14385 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14387 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14388 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14390 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14391 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14394 bool unsigned0 = TYPE_UNSIGNED (inner0);
14395 bool unsigned1 = TYPE_UNSIGNED (inner1);
14397 if (TREE_CODE (op0) == INTEGER_CST)
14398 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14400 if (TREE_CODE (op1) == INTEGER_CST)
14401 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14403 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14404 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14406 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14407 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14408 : TYPE_PRECISION (inner0);
14410 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14411 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14412 : TYPE_PRECISION (inner1);
14414 return precision0 + precision1 < TYPE_PRECISION (type);
14421 return (tree_expr_nonnegative_warnv_p (op0,
14423 || tree_expr_nonnegative_warnv_p (op1,
14424 strict_overflow_p));
14430 case TRUNC_DIV_EXPR:
14431 case CEIL_DIV_EXPR:
14432 case FLOOR_DIV_EXPR:
14433 case ROUND_DIV_EXPR:
14434 return (tree_expr_nonnegative_warnv_p (op0,
14436 && tree_expr_nonnegative_warnv_p (op1,
14437 strict_overflow_p));
14439 case TRUNC_MOD_EXPR:
14440 case CEIL_MOD_EXPR:
14441 case FLOOR_MOD_EXPR:
14442 case ROUND_MOD_EXPR:
14443 return tree_expr_nonnegative_warnv_p (op0,
14444 strict_overflow_p);
14446 return tree_simple_nonnegative_warnv_p (code, type);
14449 /* We don't know sign of `t', so be conservative and return false. */
14453 /* Return true if T is known to be non-negative. If the return
14454 value is based on the assumption that signed overflow is undefined,
14455 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14456 *STRICT_OVERFLOW_P. */
14459 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14461 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14464 switch (TREE_CODE (t))
14467 return tree_int_cst_sgn (t) >= 0;
14470 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14473 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14476 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14478 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14479 strict_overflow_p));
14481 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14484 /* We don't know sign of `t', so be conservative and return false. */
14488 /* Return true if T is known to be non-negative. If the return
14489 value is based on the assumption that signed overflow is undefined,
14490 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14491 *STRICT_OVERFLOW_P. */
14494 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14495 tree arg0, tree arg1, bool *strict_overflow_p)
14497 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14498 switch (DECL_FUNCTION_CODE (fndecl))
14500 CASE_FLT_FN (BUILT_IN_ACOS):
14501 CASE_FLT_FN (BUILT_IN_ACOSH):
14502 CASE_FLT_FN (BUILT_IN_CABS):
14503 CASE_FLT_FN (BUILT_IN_COSH):
14504 CASE_FLT_FN (BUILT_IN_ERFC):
14505 CASE_FLT_FN (BUILT_IN_EXP):
14506 CASE_FLT_FN (BUILT_IN_EXP10):
14507 CASE_FLT_FN (BUILT_IN_EXP2):
14508 CASE_FLT_FN (BUILT_IN_FABS):
14509 CASE_FLT_FN (BUILT_IN_FDIM):
14510 CASE_FLT_FN (BUILT_IN_HYPOT):
14511 CASE_FLT_FN (BUILT_IN_POW10):
14512 CASE_INT_FN (BUILT_IN_FFS):
14513 CASE_INT_FN (BUILT_IN_PARITY):
14514 CASE_INT_FN (BUILT_IN_POPCOUNT):
14515 case BUILT_IN_BSWAP32:
14516 case BUILT_IN_BSWAP64:
14520 CASE_FLT_FN (BUILT_IN_SQRT):
14521 /* sqrt(-0.0) is -0.0. */
14522 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14524 return tree_expr_nonnegative_warnv_p (arg0,
14525 strict_overflow_p);
14527 CASE_FLT_FN (BUILT_IN_ASINH):
14528 CASE_FLT_FN (BUILT_IN_ATAN):
14529 CASE_FLT_FN (BUILT_IN_ATANH):
14530 CASE_FLT_FN (BUILT_IN_CBRT):
14531 CASE_FLT_FN (BUILT_IN_CEIL):
14532 CASE_FLT_FN (BUILT_IN_ERF):
14533 CASE_FLT_FN (BUILT_IN_EXPM1):
14534 CASE_FLT_FN (BUILT_IN_FLOOR):
14535 CASE_FLT_FN (BUILT_IN_FMOD):
14536 CASE_FLT_FN (BUILT_IN_FREXP):
14537 CASE_FLT_FN (BUILT_IN_LCEIL):
14538 CASE_FLT_FN (BUILT_IN_LDEXP):
14539 CASE_FLT_FN (BUILT_IN_LFLOOR):
14540 CASE_FLT_FN (BUILT_IN_LLCEIL):
14541 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14542 CASE_FLT_FN (BUILT_IN_LLRINT):
14543 CASE_FLT_FN (BUILT_IN_LLROUND):
14544 CASE_FLT_FN (BUILT_IN_LRINT):
14545 CASE_FLT_FN (BUILT_IN_LROUND):
14546 CASE_FLT_FN (BUILT_IN_MODF):
14547 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14548 CASE_FLT_FN (BUILT_IN_RINT):
14549 CASE_FLT_FN (BUILT_IN_ROUND):
14550 CASE_FLT_FN (BUILT_IN_SCALB):
14551 CASE_FLT_FN (BUILT_IN_SCALBLN):
14552 CASE_FLT_FN (BUILT_IN_SCALBN):
14553 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14554 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14555 CASE_FLT_FN (BUILT_IN_SINH):
14556 CASE_FLT_FN (BUILT_IN_TANH):
14557 CASE_FLT_FN (BUILT_IN_TRUNC):
14558 /* True if the 1st argument is nonnegative. */
14559 return tree_expr_nonnegative_warnv_p (arg0,
14560 strict_overflow_p);
14562 CASE_FLT_FN (BUILT_IN_FMAX):
14563 /* True if the 1st OR 2nd arguments are nonnegative. */
14564 return (tree_expr_nonnegative_warnv_p (arg0,
14566 || (tree_expr_nonnegative_warnv_p (arg1,
14567 strict_overflow_p)));
14569 CASE_FLT_FN (BUILT_IN_FMIN):
14570 /* True if the 1st AND 2nd arguments are nonnegative. */
14571 return (tree_expr_nonnegative_warnv_p (arg0,
14573 && (tree_expr_nonnegative_warnv_p (arg1,
14574 strict_overflow_p)));
14576 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14577 /* True if the 2nd argument is nonnegative. */
14578 return tree_expr_nonnegative_warnv_p (arg1,
14579 strict_overflow_p);
14581 CASE_FLT_FN (BUILT_IN_POWI):
14582 /* True if the 1st argument is nonnegative or the second
14583 argument is an even integer. */
14584 if (TREE_CODE (arg1) == INTEGER_CST
14585 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14587 return tree_expr_nonnegative_warnv_p (arg0,
14588 strict_overflow_p);
14590 CASE_FLT_FN (BUILT_IN_POW):
14591 /* True if the 1st argument is nonnegative or the second
14592 argument is an even integer valued real. */
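	/* (E.g. pow (x, 2.0) is treated as non-negative for any X, because
	   the exponent is an even integer-valued real.)  */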
14593 if (TREE_CODE (arg1) == REAL_CST)
14598 c = TREE_REAL_CST (arg1);
14599 n = real_to_integer (&c);
14602 REAL_VALUE_TYPE cint;
14603 real_from_integer (&cint, VOIDmode, n,
14604 n < 0 ? -1 : 0, 0);
14605 if (real_identical (&c, &cint))
14609 return tree_expr_nonnegative_warnv_p (arg0,
14610 strict_overflow_p);
14615 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14619 /* Return true if T is known to be non-negative. If the return
14620 value is based on the assumption that signed overflow is undefined,
14621 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14622 *STRICT_OVERFLOW_P. */
14625 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14627 enum tree_code code = TREE_CODE (t);
14628 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14635 tree temp = TARGET_EXPR_SLOT (t);
14636 t = TARGET_EXPR_INITIAL (t);
14638 /* If the initializer is non-void, then it's a normal expression
14639 that will be assigned to the slot. */
14640 if (!VOID_TYPE_P (t))
14641 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14643 /* Otherwise, the initializer sets the slot in some way. One common
14644 way is an assignment statement at the end of the initializer. */
14647 if (TREE_CODE (t) == BIND_EXPR)
14648 t = expr_last (BIND_EXPR_BODY (t));
14649 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14650 || TREE_CODE (t) == TRY_CATCH_EXPR)
14651 t = expr_last (TREE_OPERAND (t, 0));
14652 else if (TREE_CODE (t) == STATEMENT_LIST)
14657 if (TREE_CODE (t) == MODIFY_EXPR
14658 && TREE_OPERAND (t, 0) == temp)
14659 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14660 strict_overflow_p);
14667 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14668 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14670 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14671 get_callee_fndecl (t),
14674 strict_overflow_p);
14676 case COMPOUND_EXPR:
14678 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14679 strict_overflow_p);
14681 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14682 strict_overflow_p);
14684 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14685 strict_overflow_p);
14688 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14692 /* We don't know sign of `t', so be conservative and return false. */
14696 /* Return true if T is known to be non-negative. If the return
14697 value is based on the assumption that signed overflow is undefined,
14698 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14699 *STRICT_OVERFLOW_P. */
14702 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14704 enum tree_code code;
14705 if (t == error_mark_node)
14708 code = TREE_CODE (t);
14709 switch (TREE_CODE_CLASS (code))
14712 case tcc_comparison:
14713 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14715 TREE_OPERAND (t, 0),
14716 TREE_OPERAND (t, 1),
14717 strict_overflow_p);
14720 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14722 TREE_OPERAND (t, 0),
14723 strict_overflow_p);
14726 case tcc_declaration:
14727 case tcc_reference:
14728 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14736 case TRUTH_AND_EXPR:
14737 case TRUTH_OR_EXPR:
14738 case TRUTH_XOR_EXPR:
14739 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14741 TREE_OPERAND (t, 0),
14742 TREE_OPERAND (t, 1),
14743 strict_overflow_p);
14744 case TRUTH_NOT_EXPR:
14745 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14747 TREE_OPERAND (t, 0),
14748 strict_overflow_p);
14755 case WITH_SIZE_EXPR:
14757 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14760 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14764 /* Return true if `t' is known to be non-negative. Handle warnings
14765 about undefined signed overflow. */
14768 tree_expr_nonnegative_p (tree t)
14770 bool ret, strict_overflow_p;
14772 strict_overflow_p = false;
14773 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14774 if (strict_overflow_p)
14775 fold_overflow_warning (("assuming signed overflow does not occur when "
14776 "determining that expression is always "
14778 WARN_STRICT_OVERFLOW_MISC);
14783 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14784 For floating point we further ensure that T is not denormal.
14785 Similar logic is present in nonzero_address in rtlanal.h.
14787 If the return value is based on the assumption that signed overflow
14788 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14789 change *STRICT_OVERFLOW_P. */
14792 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14793 bool *strict_overflow_p)
14798 return tree_expr_nonzero_warnv_p (op0,
14799 strict_overflow_p);
14803 tree inner_type = TREE_TYPE (op0);
14804 tree outer_type = type;
14806 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14807 && tree_expr_nonzero_warnv_p (op0,
14808 strict_overflow_p));
14812 case NON_LVALUE_EXPR:
14813 return tree_expr_nonzero_warnv_p (op0,
14814 strict_overflow_p);
14823 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14824 For floating point we further ensure that T is not denormal.
14825 Similar logic is present in nonzero_address in rtlanal.h.
14827 If the return value is based on the assumption that signed overflow
14828 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14829 change *STRICT_OVERFLOW_P. */
14832 tree_binary_nonzero_warnv_p (enum tree_code code,
14835 tree op1, bool *strict_overflow_p)
14837 bool sub_strict_overflow_p;
14840 case POINTER_PLUS_EXPR:
14842 if (TYPE_OVERFLOW_UNDEFINED (type))
 14844	  /* In the presence of negative values it is hard
 14845	     to say anything definite.  */
14846 sub_strict_overflow_p = false;
14847 if (!tree_expr_nonnegative_warnv_p (op0,
14848 &sub_strict_overflow_p)
14849 || !tree_expr_nonnegative_warnv_p (op1,
14850 &sub_strict_overflow_p))
14852 /* One of operands must be positive and the other non-negative. */
14853 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14854 overflows, on a twos-complement machine the sum of two
14855 nonnegative numbers can never be zero. */
14856 return (tree_expr_nonzero_warnv_p (op0,
14858 || tree_expr_nonzero_warnv_p (op1,
14859 strict_overflow_p));
14864 if (TYPE_OVERFLOW_UNDEFINED (type))
14866 if (tree_expr_nonzero_warnv_p (op0,
14868 && tree_expr_nonzero_warnv_p (op1,
14869 strict_overflow_p))
14871 *strict_overflow_p = true;
14878 sub_strict_overflow_p = false;
14879 if (tree_expr_nonzero_warnv_p (op0,
14880 &sub_strict_overflow_p)
14881 && tree_expr_nonzero_warnv_p (op1,
14882 &sub_strict_overflow_p))
14884 if (sub_strict_overflow_p)
14885 *strict_overflow_p = true;
14890 sub_strict_overflow_p = false;
14891 if (tree_expr_nonzero_warnv_p (op0,
14892 &sub_strict_overflow_p))
14894 if (sub_strict_overflow_p)
14895 *strict_overflow_p = true;
14897 /* When both operands are nonzero, then MAX must be too. */
14898 if (tree_expr_nonzero_warnv_p (op1,
14899 strict_overflow_p))
14902 /* MAX where operand 0 is positive is positive. */
14903 return tree_expr_nonnegative_warnv_p (op0,
14904 strict_overflow_p);
14906 /* MAX where operand 1 is positive is positive. */
14907 else if (tree_expr_nonzero_warnv_p (op1,
14908 &sub_strict_overflow_p)
14909 && tree_expr_nonnegative_warnv_p (op1,
14910 &sub_strict_overflow_p))
14912 if (sub_strict_overflow_p)
14913 *strict_overflow_p = true;
14919 return (tree_expr_nonzero_warnv_p (op1,
14921 || tree_expr_nonzero_warnv_p (op0,
14922 strict_overflow_p));
14931 /* Return true when T is an address and is known to be nonzero.
14932 For floating point we further ensure that T is not denormal.
14933 Similar logic is present in nonzero_address in rtlanal.h.
14935 If the return value is based on the assumption that signed overflow
14936 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14937 change *STRICT_OVERFLOW_P. */
14940 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14942 bool sub_strict_overflow_p;
14943 switch (TREE_CODE (t))
14946 return !integer_zerop (t);
14950 tree base = TREE_OPERAND (t, 0);
14951 if (!DECL_P (base))
14952 base = get_base_address (base);
14957 /* Weak declarations may link to NULL. Other things may also be NULL
14958 so protect with -fdelete-null-pointer-checks; but not variables
14959 allocated on the stack. */
14961 && (flag_delete_null_pointer_checks
14962 || (DECL_CONTEXT (base)
14963 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
14964 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
14965 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
14967 /* Constants are never weak. */
14968 if (CONSTANT_CLASS_P (base))
14975 sub_strict_overflow_p = false;
14976 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14977 &sub_strict_overflow_p)
14978 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14979 &sub_strict_overflow_p))
14981 if (sub_strict_overflow_p)
14982 *strict_overflow_p = true;
14993 /* Return true when T is an address and is known to be nonzero.
14994 For floating point we further ensure that T is not denormal.
14995 Similar logic is present in nonzero_address in rtlanal.h.
14997 If the return value is based on the assumption that signed overflow
14998 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14999 change *STRICT_OVERFLOW_P. */
15002 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15004 tree type = TREE_TYPE (t);
15005 enum tree_code code;
15007 /* Doing something useful for floating point would need more work. */
15008 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15011 code = TREE_CODE (t);
15012 switch (TREE_CODE_CLASS (code))
15015 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15016 strict_overflow_p);
15018 case tcc_comparison:
15019 return tree_binary_nonzero_warnv_p (code, type,
15020 TREE_OPERAND (t, 0),
15021 TREE_OPERAND (t, 1),
15022 strict_overflow_p);
15024 case tcc_declaration:
15025 case tcc_reference:
15026 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15034 case TRUTH_NOT_EXPR:
15035 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15036 strict_overflow_p);
15038 case TRUTH_AND_EXPR:
15039 case TRUTH_OR_EXPR:
15040 case TRUTH_XOR_EXPR:
15041 return tree_binary_nonzero_warnv_p (code, type,
15042 TREE_OPERAND (t, 0),
15043 TREE_OPERAND (t, 1),
15044 strict_overflow_p);
15051 case WITH_SIZE_EXPR:
15053 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15055 case COMPOUND_EXPR:
15058 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15059 strict_overflow_p);
15062 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15063 strict_overflow_p);
15066 return alloca_call_p (t);
15074 /* Return true when T is an address and is known to be nonzero.
15075 Handle warnings about undefined signed overflow. */
15078 tree_expr_nonzero_p (tree t)
15080 bool ret, strict_overflow_p;
15082 strict_overflow_p = false;
15083 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15084 if (strict_overflow_p)
15085 fold_overflow_warning (("assuming signed overflow does not occur when "
15086 "determining that expression is always "
15088 WARN_STRICT_OVERFLOW_MISC);
15092 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
 15093    attempt to fold the expression to a constant without modifying TYPE,
	  OP0 or OP1.
15096 If the expression could be simplified to a constant, then return
15097 the constant. If the expression would not be simplified to a
15098 constant, then return NULL_TREE. */
15101 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15103 tree tem = fold_binary (code, type, op0, op1);
15104 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15107 /* Given the components of a unary expression CODE, TYPE and OP0,
 15108    attempt to fold the expression to a constant without modifying
	  TYPE or OP0.
15111 If the expression could be simplified to a constant, then return
15112 the constant. If the expression would not be simplified to a
15113 constant, then return NULL_TREE. */
15116 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15118 tree tem = fold_unary (code, type, op0);
15119 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15122 /* If EXP represents referencing an element in a constant string
15123 (either via pointer arithmetic or array indexing), return the
15124 tree representing the value accessed, otherwise return NULL. */
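/* (E.g. the C expression "abc"[1] is folded by this routine to the
   character constant 'b', and *("abc" + 2) to 'c'.)  */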
15127 fold_read_from_constant_string (tree exp)
15129 if ((TREE_CODE (exp) == INDIRECT_REF
15130 || TREE_CODE (exp) == ARRAY_REF)
15131 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15133 tree exp1 = TREE_OPERAND (exp, 0);
15136 location_t loc = EXPR_LOCATION (exp);
15138 if (TREE_CODE (exp) == INDIRECT_REF)
15139 string = string_constant (exp1, &index);
15142 tree low_bound = array_ref_low_bound (exp);
15143 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15145 /* Optimize the special-case of a zero lower bound.
15147 We convert the low_bound to sizetype to avoid some problems
15148 with constant folding. (E.g. suppose the lower bound is 1,
 15149	     and its mode is QI.  Without the conversion, (ARRAY
15150 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15151 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15152 if (! integer_zerop (low_bound))
15153 index = size_diffop_loc (loc, index,
15154 fold_convert_loc (loc, sizetype, low_bound));
15160 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15161 && TREE_CODE (string) == STRING_CST
15162 && TREE_CODE (index) == INTEGER_CST
15163 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15164 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15166 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15167 return build_int_cst_type (TREE_TYPE (exp),
15168 (TREE_STRING_POINTER (string)
15169 [TREE_INT_CST_LOW (index)]));
15174 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15175 an integer constant, real, or fixed-point constant.
15177 TYPE is the type of the result. */
15180 fold_negate_const (tree arg0, tree type)
15182 tree t = NULL_TREE;
15184 switch (TREE_CODE (arg0))
15188 double_int val = tree_to_double_int (arg0);
15189 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15191 t = force_fit_type_double (type, val, 1,
15192 (overflow | TREE_OVERFLOW (arg0))
15193 && !TYPE_UNSIGNED (type));
15198 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15203 FIXED_VALUE_TYPE f;
15204 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15205 &(TREE_FIXED_CST (arg0)), NULL,
15206 TYPE_SATURATING (type));
15207 t = build_fixed (type, f);
15208 /* Propagate overflow flags. */
15209 if (overflow_p | TREE_OVERFLOW (arg0))
15210 TREE_OVERFLOW (t) = 1;
15215 gcc_unreachable ();
15221 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15222 an integer constant or real constant.
15224 TYPE is the type of the result. */
15227 fold_abs_const (tree arg0, tree type)
15229 tree t = NULL_TREE;
15231 switch (TREE_CODE (arg0))
15235 double_int val = tree_to_double_int (arg0);
15237 /* If the value is unsigned or non-negative, then the absolute value
15238 is the same as the ordinary value. */
15239 if (TYPE_UNSIGNED (type)
15240 || !double_int_negative_p (val))
 15243	/* If the value is negative, then the absolute value is
	   its negation.  */
15249 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15250 t = force_fit_type_double (type, val, -1,
15251 overflow | TREE_OVERFLOW (arg0));
15257 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15258 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15264 gcc_unreachable ();
15270 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15271 constant. TYPE is the type of the result. */
15274 fold_not_const (const_tree arg0, tree type)
15278 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15280 val = double_int_not (tree_to_double_int (arg0));
15281 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15284 /* Given CODE, a relational operator, the target type, TYPE and two
15285 constant operands OP0 and OP1, return the result of the
15286 relational operation. If the result is not a compile time
15287 constant, then return NULL_TREE. */
15290 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15292 int result, invert;
15294 /* From here on, the only cases we handle are when the result is
15295 known to be a constant. */
15297 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15299 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15300 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15302 /* Handle the cases where either operand is a NaN. */
15303 if (real_isnan (c0) || real_isnan (c1))
15313 case UNORDERED_EXPR:
15327 if (flag_trapping_math)
15333 gcc_unreachable ();
15336 return constant_boolean_node (result, type);
15339 return constant_boolean_node (real_compare (code, c0, c1), type);
15342 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15344 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15345 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15346 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15349 /* Handle equality/inequality of complex constants. */
15350 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15352 tree rcond = fold_relational_const (code, type,
15353 TREE_REALPART (op0),
15354 TREE_REALPART (op1));
15355 tree icond = fold_relational_const (code, type,
15356 TREE_IMAGPART (op0),
15357 TREE_IMAGPART (op1));
15358 if (code == EQ_EXPR)
15359 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15360 else if (code == NE_EXPR)
15361 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15366 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15368 To compute GT, swap the arguments and do LT.
15369 To compute GE, do LT and invert the result.
15370 To compute LE, swap the arguments, do LT and invert the result.
15371 To compute NE, do EQ and invert the result.
15373 Therefore, the code below must handle only EQ and LT. */
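/* (For instance, 5 > 3 is evaluated as 3 < 5, and 5 >= 3 as the inverse
   of 5 < 3, so only the LT and EQ kernels are ever needed below.)  */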
15375 if (code == LE_EXPR || code == GT_EXPR)
15380 code = swap_tree_comparison (code);
15383 /* Note that it is safe to invert for real values here because we
 15384	   have already handled the one case where it matters.  */
15387 if (code == NE_EXPR || code == GE_EXPR)
15390 code = invert_tree_comparison (code, false);
15393 /* Compute a result for LT or EQ if args permit;
15394 Otherwise return T. */
15395 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15397 if (code == EQ_EXPR)
15398 result = tree_int_cst_equal (op0, op1);
15399 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15400 result = INT_CST_LT_UNSIGNED (op0, op1);
15402 result = INT_CST_LT (op0, op1);
15409 return constant_boolean_node (result, type);
15412 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
 15413    indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
	  itself.  */
15417 fold_build_cleanup_point_expr (tree type, tree expr)
15419 /* If the expression does not have side effects then we don't have to wrap
15420 it with a cleanup point expression. */
15421 if (!TREE_SIDE_EFFECTS (expr))
15424 /* If the expression is a return, check to see if the expression inside the
15425 return has no side effects or the right hand side of the modify expression
 15426    inside the return.  If either has no side effects, we don't need to
15427 wrap the expression in a cleanup point expression. Note we don't check the
15428 left hand side of the modify because it should always be a return decl. */
15429 if (TREE_CODE (expr) == RETURN_EXPR)
15431 tree op = TREE_OPERAND (expr, 0);
15432 if (!op || !TREE_SIDE_EFFECTS (op))
15434 op = TREE_OPERAND (op, 1);
15435 if (!TREE_SIDE_EFFECTS (op))
15439 return build1 (CLEANUP_POINT_EXPR, type, expr);
15442 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15443 of an indirection through OP0, or NULL_TREE if no simplification is
15447 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15453 subtype = TREE_TYPE (sub);
15454 if (!POINTER_TYPE_P (subtype))
15457 if (TREE_CODE (sub) == ADDR_EXPR)
15459 tree op = TREE_OPERAND (sub, 0);
15460 tree optype = TREE_TYPE (op);
15461 /* *&CONST_DECL -> to the value of the const decl. */
15462 if (TREE_CODE (op) == CONST_DECL)
15463 return DECL_INITIAL (op);
15464 /* *&p => p; make sure to handle *&"str"[cst] here. */
15465 if (type == optype)
15467 tree fop = fold_read_from_constant_string (op);
15473 /* *(foo *)&fooarray => fooarray[0] */
15474 else if (TREE_CODE (optype) == ARRAY_TYPE
15475 && type == TREE_TYPE (optype))
15477 tree type_domain = TYPE_DOMAIN (optype);
15478 tree min_val = size_zero_node;
15479 if (type_domain && TYPE_MIN_VALUE (type_domain))
15480 min_val = TYPE_MIN_VALUE (type_domain);
15481 op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15482 SET_EXPR_LOCATION (op0, loc);
15485 /* *(foo *)&complexfoo => __real__ complexfoo */
15486 else if (TREE_CODE (optype) == COMPLEX_TYPE
15487 && type == TREE_TYPE (optype))
15488 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15489 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15490 else if (TREE_CODE (optype) == VECTOR_TYPE
15491 && type == TREE_TYPE (optype))
15493 tree part_width = TYPE_SIZE (type);
15494 tree index = bitsize_int (0);
15495 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15499 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15500 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15501 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15503 tree op00 = TREE_OPERAND (sub, 0);
15504 tree op01 = TREE_OPERAND (sub, 1);
15508 op00type = TREE_TYPE (op00);
15509 if (TREE_CODE (op00) == ADDR_EXPR
15510 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15511 && type == TREE_TYPE (TREE_TYPE (op00type)))
15513 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15514 tree part_width = TYPE_SIZE (type);
15515 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15516 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15517 tree index = bitsize_int (indexi);
15519 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15520 return fold_build3_loc (loc,
15521 BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15522 part_width, index);
15528 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15529 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15530 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15532 tree op00 = TREE_OPERAND (sub, 0);
15533 tree op01 = TREE_OPERAND (sub, 1);
15537 op00type = TREE_TYPE (op00);
15538 if (TREE_CODE (op00) == ADDR_EXPR
15539 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15540 && type == TREE_TYPE (TREE_TYPE (op00type)))
15542 tree size = TYPE_SIZE_UNIT (type);
15543 if (tree_int_cst_equal (size, op01))
15544 return fold_build1_loc (loc, IMAGPART_EXPR, type,
15545 TREE_OPERAND (op00, 0));
15549 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15550 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15551 && type == TREE_TYPE (TREE_TYPE (subtype)))
15554 tree min_val = size_zero_node;
15555 sub = build_fold_indirect_ref_loc (loc, sub);
15556 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15557 if (type_domain && TYPE_MIN_VALUE (type_domain))
15558 min_val = TYPE_MIN_VALUE (type_domain);
15559 op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15560 SET_EXPR_LOCATION (op0, loc);
 15567 /* Builds an expression for an indirection through T, simplifying some
	  cases.  */
15571 build_fold_indirect_ref_loc (location_t loc, tree t)
15573 tree type = TREE_TYPE (TREE_TYPE (t));
15574 tree sub = fold_indirect_ref_1 (loc, type, t);
15579 t = build1 (INDIRECT_REF, type, t);
15580 SET_EXPR_LOCATION (t, loc);
15584 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15587 fold_indirect_ref_loc (location_t loc, tree t)
15589 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15597 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15598 whose result is ignored. The type of the returned tree need not be
15599 the same as the original expression. */
15602 fold_ignored_result (tree t)
15604 if (!TREE_SIDE_EFFECTS (t))
15605 return integer_zero_node;
15608 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15611 t = TREE_OPERAND (t, 0);
15615 case tcc_comparison:
15616 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15617 t = TREE_OPERAND (t, 0);
15618 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15619 t = TREE_OPERAND (t, 1);
15624 case tcc_expression:
15625 switch (TREE_CODE (t))
15627 case COMPOUND_EXPR:
15628 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15630 t = TREE_OPERAND (t, 0);
15634 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15635 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15637 t = TREE_OPERAND (t, 0);
15650 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15651 This can only be applied to objects of a sizetype. */
15654 round_up_loc (location_t loc, tree value, int divisor)
15656 tree div = NULL_TREE;
15658 gcc_assert (divisor > 0);
15662 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15663 have to do anything. Only do this when we are not given a const,
 15664     because in that case, this check is more expensive than just
	   doing it.  */
15666 if (TREE_CODE (value) != INTEGER_CST)
15668 div = build_int_cst (TREE_TYPE (value), divisor);
15670 if (multiple_of_p (TREE_TYPE (value), value, div))
15674 /* If divisor is a power of two, simplify this to bit manipulation. */
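      /* (For a non-constant VALUE and DIVISOR == 8 this becomes
	 (value + 7) & -8, so 13 is rounded up to 16 while 16 is left
	 unchanged.)  */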
15675 if (divisor == (divisor & -divisor))
15677 if (TREE_CODE (value) == INTEGER_CST)
15679 double_int val = tree_to_double_int (value);
15682 if ((val.low & (divisor - 1)) == 0)
15685 overflow_p = TREE_OVERFLOW (value);
15686 val.low &= ~(divisor - 1);
15687 val.low += divisor;
15695 return force_fit_type_double (TREE_TYPE (value), val,
15702 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15703 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15704 t = build_int_cst (TREE_TYPE (value), -divisor);
15705 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15711 div = build_int_cst (TREE_TYPE (value), divisor);
15712 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15713 value = size_binop_loc (loc, MULT_EXPR, value, div);
15719 /* Likewise, but round down. */
15722 round_down_loc (location_t loc, tree value, int divisor)
15724 tree div = NULL_TREE;
15726 gcc_assert (divisor > 0);
15730 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15731 have to do anything. Only do this when we are not given a const,
 15732     because in that case, this check is more expensive than just
	   doing it.  */
15734 if (TREE_CODE (value) != INTEGER_CST)
15736 div = build_int_cst (TREE_TYPE (value), divisor);
15738 if (multiple_of_p (TREE_TYPE (value), value, div))
15742 /* If divisor is a power of two, simplify this to bit manipulation. */
15743 if (divisor == (divisor & -divisor))
15747 t = build_int_cst (TREE_TYPE (value), -divisor);
15748 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15753 div = build_int_cst (TREE_TYPE (value), divisor);
15754 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15755 value = size_binop_loc (loc, MULT_EXPR, value, div);
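
/* The corresponding standalone sketch for rounding down with a
   power-of-two DIVISOR (illustrative only, not a GCC helper): simply
   clear the low bits,

     static inline unsigned HOST_WIDE_INT
     round_down_pow2_sketch (unsigned HOST_WIDE_INT value,
                             unsigned HOST_WIDE_INT divisor)
     {
       return value & -divisor;
     }

   so value 10 with divisor 8 gives 10 & -8 == 8.  */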

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
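
/* For instance (illustrative): for EXP == &a.f, the returned core is &a,
   *PBITPOS is the constant bit offset of field f within a, and *POFFSET
   holds any remaining variable part of the offset, or NULL_TREE when the
   whole offset is constant.  */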

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
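
/* For example (illustrative): given "int a[10];", calling
   ptr_difference_const on &a[3] and &a[1] stores 2 * sizeof (int) in
   *DIFF and returns true, whereas &a[i] and &a[1] yield false because
   only one of the two offsets is a compile-time constant.  */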

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument. */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
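
/* For example (illustrative): when the sign of the result does not matter,
   say because the value only feeds a fabs call or an equality comparison
   against zero, "-x * y" can be rewritten as "x * y" and "copysign (x, y)"
   as "x", keeping any side effects of "y".  */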