1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide and size_binop.
32 fold takes a tree as argument and returns a simplified tree.
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'.
41 Note: Since the folders get called on non-gimple code as well as
42 gimple code, we need to handle GIMPLE tuples as well as their
43 corresponding tree equivalents. */
47 #include "coretypes.h"
56 #include "diagnostic-core.h"
60 #include "langhooks.h"
63 #include "tree-flow.h"
65 /* Nonzero if we are folding constants inside an initializer; zero
67 int folding_initializer = 0;
69 /* The following constants represent a bit based encoding of GCC's
70 comparison operators. This encoding simplifies transformations
71 on relational comparison operators, such as AND and OR. */
72 enum comparison_code {
91 static bool negate_mathfn_p (enum built_in_function);
92 static bool negate_expr_p (tree);
93 static tree negate_expr (tree);
94 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
95 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
96 static tree const_binop (enum tree_code, tree, tree);
97 static enum comparison_code comparison_to_compcode (enum tree_code);
98 static enum tree_code compcode_to_comparison (enum comparison_code);
99 static int operand_equal_for_comparison_p (tree, tree, tree);
100 static int twoval_comparison_p (tree, tree *, tree *, int *);
101 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
102 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
103 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
104 static tree make_bit_field_ref (location_t, tree, tree,
105 HOST_WIDE_INT, HOST_WIDE_INT, int);
106 static tree optimize_bit_field_compare (location_t, enum tree_code,
108 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
110 enum machine_mode *, int *, int *,
112 static int all_ones_mask_p (const_tree, int);
113 static tree sign_bit_p (tree, const_tree);
114 static int simple_operand_p (const_tree);
115 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
116 static tree range_predecessor (tree);
117 static tree range_successor (tree);
118 extern tree make_range (tree, int *, tree *, tree *, bool *);
119 extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
121 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
122 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
123 static tree unextend (tree, int, int, tree);
124 static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
125 static tree optimize_minmax_comparison (location_t, enum tree_code,
127 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
128 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
129 static tree fold_binary_op_with_conditional_arg (location_t,
130 enum tree_code, tree,
133 static tree fold_mathfn_compare (location_t,
134 enum built_in_function, enum tree_code,
136 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
137 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
138 static bool reorder_operands_p (const_tree, const_tree);
139 static tree fold_negate_const (tree, tree);
140 static tree fold_not_const (const_tree, tree);
141 static tree fold_relational_const (enum tree_code, tree, tree, tree);
142 static tree fold_convert_const (enum tree_code, tree, tree);
145 /* Similar to protected_set_expr_location, but never modify x in place,
146 if location can and needs to be set, unshare it. */
149 protected_set_expr_location_unshare (tree x, location_t loc)
151 if (CAN_HAVE_LOCATION_P (x)
152 && EXPR_LOCATION (x) != loc
153 && !(TREE_CODE (x) == SAVE_EXPR
154 || TREE_CODE (x) == TARGET_EXPR
155 || TREE_CODE (x) == BIND_EXPR))
158 SET_EXPR_LOCATION (x, loc);
164 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
165 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
166 and SUM1. Then this yields nonzero if overflow occurred during the
169 Overflow occurs if A and B have the same sign, but A and SUM differ in
170 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
172 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
174 /* If ARG2 divides ARG1 with zero remainder, carries out the division
175 of type CODE and returns the quotient.
176 Otherwise returns NULL_TREE. */
179 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
184 /* The sign of the division is according to operand two, that
185 does the correct thing for POINTER_PLUS_EXPR where we want
186 a signed division. */
187 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
188 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
189 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
192 quo = double_int_divmod (tree_to_double_int (arg1),
193 tree_to_double_int (arg2),
196 if (double_int_zero_p (rem))
197 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
202 /* This is nonzero if we should defer warnings about undefined
203 overflow. This facility exists because these warnings are a
204 special case. The code to estimate loop iterations does not want
205 to issue any warnings, since it works with expressions which do not
206 occur in user code. Various bits of cleanup code call fold(), but
207 only use the result if it has certain characteristics (e.g., is a
208 constant); that code only wants to issue a warning if the result is
211 static int fold_deferring_overflow_warnings;
213 /* If a warning about undefined overflow is deferred, this is the
214 warning. Note that this may cause us to turn two warnings into
215 one, but that is fine since it is sufficient to only give one
216 warning per expression. */
218 static const char* fold_deferred_overflow_warning;
220 /* If a warning about undefined overflow is deferred, this is the
221 level at which the warning should be emitted. */
223 static enum warn_strict_overflow_code fold_deferred_overflow_code;
225 /* Start deferring overflow warnings. We could use a stack here to
226 permit nested calls, but at present it is not necessary. */
229 fold_defer_overflow_warnings (void)
231 ++fold_deferring_overflow_warnings;
234 /* Stop deferring overflow warnings. If there is a pending warning,
235 and ISSUE is true, then issue the warning if appropriate. STMT is
236 the statement with which the warning should be associated (used for
237 location information); STMT may be NULL. CODE is the level of the
238 warning--a warn_strict_overflow_code value. This function will use
239 the smaller of CODE and the deferred code when deciding whether to
240 issue the warning. CODE may be zero to mean to always use the
244 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
249 gcc_assert (fold_deferring_overflow_warnings > 0);
250 --fold_deferring_overflow_warnings;
251 if (fold_deferring_overflow_warnings > 0)
253 if (fold_deferred_overflow_warning != NULL
255 && code < (int) fold_deferred_overflow_code)
256 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
260 warnmsg = fold_deferred_overflow_warning;
261 fold_deferred_overflow_warning = NULL;
263 if (!issue || warnmsg == NULL)
266 if (gimple_no_warning_p (stmt))
269 /* Use the smallest code level when deciding to issue the
271 if (code == 0 || code > (int) fold_deferred_overflow_code)
272 code = fold_deferred_overflow_code;
274 if (!issue_strict_overflow_warning (code))
278 locus = input_location;
280 locus = gimple_location (stmt);
281 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
284 /* Stop deferring overflow warnings, ignoring any deferred
288 fold_undefer_and_ignore_overflow_warnings (void)
290 fold_undefer_overflow_warnings (false, NULL, 0);
293 /* Whether we are deferring overflow warnings. */
296 fold_deferring_overflow_warnings_p (void)
298 return fold_deferring_overflow_warnings > 0;
301 /* This is called when we fold something based on the fact that signed
302 overflow is undefined. */
305 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
307 if (fold_deferring_overflow_warnings > 0)
309 if (fold_deferred_overflow_warning == NULL
310 || wc < fold_deferred_overflow_code)
312 fold_deferred_overflow_warning = gmsgid;
313 fold_deferred_overflow_code = wc;
316 else if (issue_strict_overflow_warning (wc))
317 warning (OPT_Wstrict_overflow, gmsgid);
320 /* Return true if the built-in mathematical function specified by CODE
321 is odd, i.e. -f(x) == f(-x). */
324 negate_mathfn_p (enum built_in_function code)
328 CASE_FLT_FN (BUILT_IN_ASIN):
329 CASE_FLT_FN (BUILT_IN_ASINH):
330 CASE_FLT_FN (BUILT_IN_ATAN):
331 CASE_FLT_FN (BUILT_IN_ATANH):
332 CASE_FLT_FN (BUILT_IN_CASIN):
333 CASE_FLT_FN (BUILT_IN_CASINH):
334 CASE_FLT_FN (BUILT_IN_CATAN):
335 CASE_FLT_FN (BUILT_IN_CATANH):
336 CASE_FLT_FN (BUILT_IN_CBRT):
337 CASE_FLT_FN (BUILT_IN_CPROJ):
338 CASE_FLT_FN (BUILT_IN_CSIN):
339 CASE_FLT_FN (BUILT_IN_CSINH):
340 CASE_FLT_FN (BUILT_IN_CTAN):
341 CASE_FLT_FN (BUILT_IN_CTANH):
342 CASE_FLT_FN (BUILT_IN_ERF):
343 CASE_FLT_FN (BUILT_IN_LLROUND):
344 CASE_FLT_FN (BUILT_IN_LROUND):
345 CASE_FLT_FN (BUILT_IN_ROUND):
346 CASE_FLT_FN (BUILT_IN_SIN):
347 CASE_FLT_FN (BUILT_IN_SINH):
348 CASE_FLT_FN (BUILT_IN_TAN):
349 CASE_FLT_FN (BUILT_IN_TANH):
350 CASE_FLT_FN (BUILT_IN_TRUNC):
353 CASE_FLT_FN (BUILT_IN_LLRINT):
354 CASE_FLT_FN (BUILT_IN_LRINT):
355 CASE_FLT_FN (BUILT_IN_NEARBYINT):
356 CASE_FLT_FN (BUILT_IN_RINT):
357 return !flag_rounding_math;
365 /* Check whether we may negate an integer constant T without causing
369 may_negate_without_overflow_p (const_tree t)
371 unsigned HOST_WIDE_INT val;
375 gcc_assert (TREE_CODE (t) == INTEGER_CST);
377 type = TREE_TYPE (t);
378 if (TYPE_UNSIGNED (type))
381 prec = TYPE_PRECISION (type);
382 if (prec > HOST_BITS_PER_WIDE_INT)
384 if (TREE_INT_CST_LOW (t) != 0)
386 prec -= HOST_BITS_PER_WIDE_INT;
387 val = TREE_INT_CST_HIGH (t);
390 val = TREE_INT_CST_LOW (t);
391 if (prec < HOST_BITS_PER_WIDE_INT)
392 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
393 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
396 /* Determine whether an expression T can be cheaply negated using
397 the function negate_expr without introducing undefined overflow. */
400 negate_expr_p (tree t)
407 type = TREE_TYPE (t);
410 switch (TREE_CODE (t))
413 if (TYPE_OVERFLOW_WRAPS (type))
416 /* Check that -CST will not overflow type. */
417 return may_negate_without_overflow_p (t);
419 return (INTEGRAL_TYPE_P (type)
420 && TYPE_OVERFLOW_WRAPS (type));
427 /* We want to canonicalize to positive real constants. Pretend
428 that only negative ones can be easily negated. */
429 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
432 return negate_expr_p (TREE_REALPART (t))
433 && negate_expr_p (TREE_IMAGPART (t));
436 return negate_expr_p (TREE_OPERAND (t, 0))
437 && negate_expr_p (TREE_OPERAND (t, 1));
440 return negate_expr_p (TREE_OPERAND (t, 0));
443 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
444 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
446 /* -(A + B) -> (-B) - A. */
447 if (negate_expr_p (TREE_OPERAND (t, 1))
448 && reorder_operands_p (TREE_OPERAND (t, 0),
449 TREE_OPERAND (t, 1)))
451 /* -(A + B) -> (-A) - B. */
452 return negate_expr_p (TREE_OPERAND (t, 0));
455 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
456 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
457 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
458 && reorder_operands_p (TREE_OPERAND (t, 0),
459 TREE_OPERAND (t, 1));
462 if (TYPE_UNSIGNED (TREE_TYPE (t)))
468 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
469 return negate_expr_p (TREE_OPERAND (t, 1))
470 || negate_expr_p (TREE_OPERAND (t, 0));
478 /* In general we can't negate A / B, because if A is INT_MIN and
479 B is 1, we may turn this into INT_MIN / -1 which is undefined
480 and actually traps on some architectures. But if overflow is
481 undefined, we can negate, because - (INT_MIN / 1) is an
483 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
484 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
486 return negate_expr_p (TREE_OPERAND (t, 1))
487 || negate_expr_p (TREE_OPERAND (t, 0));
490 /* Negate -((double)float) as (double)(-float). */
491 if (TREE_CODE (type) == REAL_TYPE)
493 tree tem = strip_float_extensions (t);
495 return negate_expr_p (tem);
500 /* Negate -f(x) as f(-x). */
501 if (negate_mathfn_p (builtin_mathfn_code (t)))
502 return negate_expr_p (CALL_EXPR_ARG (t, 0));
506 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
507 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
509 tree op1 = TREE_OPERAND (t, 1);
510 if (TREE_INT_CST_HIGH (op1) == 0
511 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
512 == TREE_INT_CST_LOW (op1))
523 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
524 simplification is possible.
525 If negate_expr_p would return true for T, NULL_TREE will never be
529 fold_negate_expr (location_t loc, tree t)
531 tree type = TREE_TYPE (t);
534 switch (TREE_CODE (t))
536 /* Convert - (~A) to A + 1. */
538 if (INTEGRAL_TYPE_P (type))
539 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
540 build_int_cst (type, 1));
544 tem = fold_negate_const (t, type);
545 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
546 || !TYPE_OVERFLOW_TRAPS (type))
551 tem = fold_negate_const (t, type);
552 /* Two's complement FP formats, such as c4x, may overflow. */
553 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
558 tem = fold_negate_const (t, type);
563 tree rpart = negate_expr (TREE_REALPART (t));
564 tree ipart = negate_expr (TREE_IMAGPART (t));
566 if ((TREE_CODE (rpart) == REAL_CST
567 && TREE_CODE (ipart) == REAL_CST)
568 || (TREE_CODE (rpart) == INTEGER_CST
569 && TREE_CODE (ipart) == INTEGER_CST))
570 return build_complex (type, rpart, ipart);
575 if (negate_expr_p (t))
576 return fold_build2_loc (loc, COMPLEX_EXPR, type,
577 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
578 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
582 if (negate_expr_p (t))
583 return fold_build1_loc (loc, CONJ_EXPR, type,
584 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
588 return TREE_OPERAND (t, 0);
591 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
592 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
594 /* -(A + B) -> (-B) - A. */
595 if (negate_expr_p (TREE_OPERAND (t, 1))
596 && reorder_operands_p (TREE_OPERAND (t, 0),
597 TREE_OPERAND (t, 1)))
599 tem = negate_expr (TREE_OPERAND (t, 1));
600 return fold_build2_loc (loc, MINUS_EXPR, type,
601 tem, TREE_OPERAND (t, 0));
604 /* -(A + B) -> (-A) - B. */
605 if (negate_expr_p (TREE_OPERAND (t, 0)))
607 tem = negate_expr (TREE_OPERAND (t, 0));
608 return fold_build2_loc (loc, MINUS_EXPR, type,
609 tem, TREE_OPERAND (t, 1));
615 /* - (A - B) -> B - A */
616 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
617 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
618 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
619 return fold_build2_loc (loc, MINUS_EXPR, type,
620 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
624 if (TYPE_UNSIGNED (type))
630 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
632 tem = TREE_OPERAND (t, 1);
633 if (negate_expr_p (tem))
634 return fold_build2_loc (loc, TREE_CODE (t), type,
635 TREE_OPERAND (t, 0), negate_expr (tem));
636 tem = TREE_OPERAND (t, 0);
637 if (negate_expr_p (tem))
638 return fold_build2_loc (loc, TREE_CODE (t), type,
639 negate_expr (tem), TREE_OPERAND (t, 1));
648 /* In general we can't negate A / B, because if A is INT_MIN and
649 B is 1, we may turn this into INT_MIN / -1 which is undefined
650 and actually traps on some architectures. But if overflow is
651 undefined, we can negate, because - (INT_MIN / 1) is an
653 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
655 const char * const warnmsg = G_("assuming signed overflow does not "
656 "occur when negating a division");
657 tem = TREE_OPERAND (t, 1);
658 if (negate_expr_p (tem))
660 if (INTEGRAL_TYPE_P (type)
661 && (TREE_CODE (tem) != INTEGER_CST
662 || integer_onep (tem)))
663 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
664 return fold_build2_loc (loc, TREE_CODE (t), type,
665 TREE_OPERAND (t, 0), negate_expr (tem));
667 tem = TREE_OPERAND (t, 0);
668 if (negate_expr_p (tem))
670 if (INTEGRAL_TYPE_P (type)
671 && (TREE_CODE (tem) != INTEGER_CST
672 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
673 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
674 return fold_build2_loc (loc, TREE_CODE (t), type,
675 negate_expr (tem), TREE_OPERAND (t, 1));
681 /* Convert -((double)float) into (double)(-float). */
682 if (TREE_CODE (type) == REAL_TYPE)
684 tem = strip_float_extensions (t);
685 if (tem != t && negate_expr_p (tem))
686 return fold_convert_loc (loc, type, negate_expr (tem));
691 /* Negate -f(x) as f(-x). */
692 if (negate_mathfn_p (builtin_mathfn_code (t))
693 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
697 fndecl = get_callee_fndecl (t);
698 arg = negate_expr (CALL_EXPR_ARG (t, 0));
699 return build_call_expr_loc (loc, fndecl, 1, arg);
704 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
705 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
707 tree op1 = TREE_OPERAND (t, 1);
708 if (TREE_INT_CST_HIGH (op1) == 0
709 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
710 == TREE_INT_CST_LOW (op1))
712 tree ntype = TYPE_UNSIGNED (type)
713 ? signed_type_for (type)
714 : unsigned_type_for (type);
715 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
716 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
717 return fold_convert_loc (loc, type, temp);
729 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
730 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
742 loc = EXPR_LOCATION (t);
743 type = TREE_TYPE (t);
746 tem = fold_negate_expr (loc, t);
748 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
749 return fold_convert_loc (loc, type, tem);
752 /* Split a tree IN into a constant, literal and variable parts that could be
753 combined with CODE to make IN. "constant" means an expression with
754 TREE_CONSTANT but that isn't an actual constant. CODE must be a
755 commutative arithmetic operation. Store the constant part into *CONP,
756 the literal in *LITP and return the variable part. If a part isn't
757 present, set it to null. If the tree does not decompose in this way,
758 return the entire tree as the variable part and the other parts as null.
760 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
761 case, we negate an operand that was subtracted. Except if it is a
762 literal for which we use *MINUS_LITP instead.
764 If NEGATE_P is true, we are negating all of IN, again except a literal
765 for which we use *MINUS_LITP instead.
767 If IN is itself a literal or constant, return it as appropriate.
769 Note that we do not guarantee that any of the three values will be the
770 same type as IN, but they will have the same signedness and mode. */
773 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
774 tree *minus_litp, int negate_p)
782 /* Strip any conversions that don't change the machine mode or signedness. */
783 STRIP_SIGN_NOPS (in);
785 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
786 || TREE_CODE (in) == FIXED_CST)
788 else if (TREE_CODE (in) == code
789 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
790 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
791 /* We can associate addition and subtraction together (even
792 though the C standard doesn't say so) for integers because
793 the value is not affected. For reals, the value might be
794 affected, so we can't. */
795 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
796 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
798 tree op0 = TREE_OPERAND (in, 0);
799 tree op1 = TREE_OPERAND (in, 1);
800 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
801 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
803 /* First see if either of the operands is a literal, then a constant. */
804 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
805 || TREE_CODE (op0) == FIXED_CST)
806 *litp = op0, op0 = 0;
807 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
808 || TREE_CODE (op1) == FIXED_CST)
809 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
811 if (op0 != 0 && TREE_CONSTANT (op0))
812 *conp = op0, op0 = 0;
813 else if (op1 != 0 && TREE_CONSTANT (op1))
814 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
816 /* If we haven't dealt with either operand, this is not a case we can
817 decompose. Otherwise, VAR is either of the ones remaining, if any. */
818 if (op0 != 0 && op1 != 0)
823 var = op1, neg_var_p = neg1_p;
825 /* Now do any needed negations. */
827 *minus_litp = *litp, *litp = 0;
829 *conp = negate_expr (*conp);
831 var = negate_expr (var);
833 else if (TREE_CONSTANT (in))
841 *minus_litp = *litp, *litp = 0;
842 else if (*minus_litp)
843 *litp = *minus_litp, *minus_litp = 0;
844 *conp = negate_expr (*conp);
845 var = negate_expr (var);
851 /* Re-associate trees split by the above function. T1 and T2 are
852 either expressions to associate or null. Return the new
853 expression, if any. LOC is the location of the new expression. If
854 we build an operation, do it in TYPE and with CODE. */
857 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
864 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
865 try to fold this since we will have infinite recursion. But do
866 deal with any NEGATE_EXPRs. */
867 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
868 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
870 if (code == PLUS_EXPR)
872 if (TREE_CODE (t1) == NEGATE_EXPR)
873 return build2_loc (loc, MINUS_EXPR, type,
874 fold_convert_loc (loc, type, t2),
875 fold_convert_loc (loc, type,
876 TREE_OPERAND (t1, 0)));
877 else if (TREE_CODE (t2) == NEGATE_EXPR)
878 return build2_loc (loc, MINUS_EXPR, type,
879 fold_convert_loc (loc, type, t1),
880 fold_convert_loc (loc, type,
881 TREE_OPERAND (t2, 0)));
882 else if (integer_zerop (t2))
883 return fold_convert_loc (loc, type, t1);
885 else if (code == MINUS_EXPR)
887 if (integer_zerop (t2))
888 return fold_convert_loc (loc, type, t1);
891 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
892 fold_convert_loc (loc, type, t2));
895 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
896 fold_convert_loc (loc, type, t2));
899 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
900 for use in int_const_binop, size_binop and size_diffop. */
903 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
905 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
907 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
922 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
923 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
924 && TYPE_MODE (type1) == TYPE_MODE (type2);
928 /* Combine two integer constants ARG1 and ARG2 under operation CODE
929 to produce a new constant. Return NULL_TREE if we don't know how
930 to evaluate CODE at compile-time.
932 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
935 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
937 double_int op1, op2, res, tmp;
939 tree type = TREE_TYPE (arg1);
940 bool uns = TYPE_UNSIGNED (type);
942 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
943 bool overflow = false;
945 op1 = tree_to_double_int (arg1);
946 op2 = tree_to_double_int (arg2);
951 res = double_int_ior (op1, op2);
955 res = double_int_xor (op1, op2);
959 res = double_int_and (op1, op2);
963 res = double_int_rshift (op1, double_int_to_shwi (op2),
964 TYPE_PRECISION (type), !uns);
968 /* It's unclear from the C standard whether shifts can overflow.
969 The following code ignores overflow; perhaps a C standard
970 interpretation ruling is needed. */
971 res = double_int_lshift (op1, double_int_to_shwi (op2),
972 TYPE_PRECISION (type), !uns);
976 res = double_int_rrotate (op1, double_int_to_shwi (op2),
977 TYPE_PRECISION (type));
981 res = double_int_lrotate (op1, double_int_to_shwi (op2),
982 TYPE_PRECISION (type));
986 overflow = add_double (op1.low, op1.high, op2.low, op2.high,
987 &res.low, &res.high);
991 neg_double (op2.low, op2.high, &res.low, &res.high);
992 add_double (op1.low, op1.high, res.low, res.high,
993 &res.low, &res.high);
994 overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
998 overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
999 &res.low, &res.high);
1002 case TRUNC_DIV_EXPR:
1003 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1004 case EXACT_DIV_EXPR:
1005 /* This is a shortcut for a common special case. */
1006 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1007 && !TREE_OVERFLOW (arg1)
1008 && !TREE_OVERFLOW (arg2)
1009 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1011 if (code == CEIL_DIV_EXPR)
1012 op1.low += op2.low - 1;
1014 res.low = op1.low / op2.low, res.high = 0;
1018 /* ... fall through ... */
1020 case ROUND_DIV_EXPR:
1021 if (double_int_zero_p (op2))
1023 if (double_int_one_p (op2))
1028 if (double_int_equal_p (op1, op2)
1029 && ! double_int_zero_p (op1))
1031 res = double_int_one;
1034 overflow = div_and_round_double (code, uns,
1035 op1.low, op1.high, op2.low, op2.high,
1036 &res.low, &res.high,
1037 &tmp.low, &tmp.high);
1040 case TRUNC_MOD_EXPR:
1041 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1042 /* This is a shortcut for a common special case. */
1043 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1044 && !TREE_OVERFLOW (arg1)
1045 && !TREE_OVERFLOW (arg2)
1046 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1048 if (code == CEIL_MOD_EXPR)
1049 op1.low += op2.low - 1;
1050 res.low = op1.low % op2.low, res.high = 0;
1054 /* ... fall through ... */
1056 case ROUND_MOD_EXPR:
1057 if (double_int_zero_p (op2))
1059 overflow = div_and_round_double (code, uns,
1060 op1.low, op1.high, op2.low, op2.high,
1061 &tmp.low, &tmp.high,
1062 &res.low, &res.high);
1066 res = double_int_min (op1, op2, uns);
1070 res = double_int_max (op1, op2, uns);
1079 t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);
1081 /* Propagate overflow flags ourselves. */
1082 if (((!uns || is_sizetype) && overflow)
1083 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1086 TREE_OVERFLOW (t) = 1;
1090 t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
1091 ((!uns || is_sizetype) && overflow)
1092 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1097 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1098 constant. We assume ARG1 and ARG2 have the same data type, or at least
1099 are the same kind of constant and the same machine mode. Return zero if
1100 combining the constants is not allowed in the current operating mode. */
1103 const_binop (enum tree_code code, tree arg1, tree arg2)
1105 /* Sanity check for the recursive cases. */
1112 if (TREE_CODE (arg1) == INTEGER_CST)
1113 return int_const_binop (code, arg1, arg2, 0);
1115 if (TREE_CODE (arg1) == REAL_CST)
1117 enum machine_mode mode;
1120 REAL_VALUE_TYPE value;
1121 REAL_VALUE_TYPE result;
1125 /* The following codes are handled by real_arithmetic. */
1140 d1 = TREE_REAL_CST (arg1);
1141 d2 = TREE_REAL_CST (arg2);
1143 type = TREE_TYPE (arg1);
1144 mode = TYPE_MODE (type);
1146 /* Don't perform operation if we honor signaling NaNs and
1147 either operand is a NaN. */
1148 if (HONOR_SNANS (mode)
1149 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1152 /* Don't perform operation if it would raise a division
1153 by zero exception. */
1154 if (code == RDIV_EXPR
1155 && REAL_VALUES_EQUAL (d2, dconst0)
1156 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1159 /* If either operand is a NaN, just return it. Otherwise, set up
1160 for floating-point trap; we return an overflow. */
1161 if (REAL_VALUE_ISNAN (d1))
1163 else if (REAL_VALUE_ISNAN (d2))
1166 inexact = real_arithmetic (&value, code, &d1, &d2);
1167 real_convert (&result, mode, &value);
1169 /* Don't constant fold this floating point operation if
1170 the result has overflowed and flag_trapping_math. */
1171 if (flag_trapping_math
1172 && MODE_HAS_INFINITIES (mode)
1173 && REAL_VALUE_ISINF (result)
1174 && !REAL_VALUE_ISINF (d1)
1175 && !REAL_VALUE_ISINF (d2))
1178 /* Don't constant fold this floating point operation if the
1179 result may dependent upon the run-time rounding mode and
1180 flag_rounding_math is set, or if GCC's software emulation
1181 is unable to accurately represent the result. */
1182 if ((flag_rounding_math
1183 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1184 && (inexact || !real_identical (&result, &value)))
1187 t = build_real (type, result);
1189 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1193 if (TREE_CODE (arg1) == FIXED_CST)
1195 FIXED_VALUE_TYPE f1;
1196 FIXED_VALUE_TYPE f2;
1197 FIXED_VALUE_TYPE result;
1202 /* The following codes are handled by fixed_arithmetic. */
1208 case TRUNC_DIV_EXPR:
1209 f2 = TREE_FIXED_CST (arg2);
1214 f2.data.high = TREE_INT_CST_HIGH (arg2);
1215 f2.data.low = TREE_INT_CST_LOW (arg2);
1223 f1 = TREE_FIXED_CST (arg1);
1224 type = TREE_TYPE (arg1);
1225 sat_p = TYPE_SATURATING (type);
1226 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1227 t = build_fixed (type, result);
1228 /* Propagate overflow flags. */
1229 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1230 TREE_OVERFLOW (t) = 1;
1234 if (TREE_CODE (arg1) == COMPLEX_CST)
1236 tree type = TREE_TYPE (arg1);
1237 tree r1 = TREE_REALPART (arg1);
1238 tree i1 = TREE_IMAGPART (arg1);
1239 tree r2 = TREE_REALPART (arg2);
1240 tree i2 = TREE_IMAGPART (arg2);
1247 real = const_binop (code, r1, r2);
1248 imag = const_binop (code, i1, i2);
1252 if (COMPLEX_FLOAT_TYPE_P (type))
1253 return do_mpc_arg2 (arg1, arg2, type,
1254 /* do_nonfinite= */ folding_initializer,
1257 real = const_binop (MINUS_EXPR,
1258 const_binop (MULT_EXPR, r1, r2),
1259 const_binop (MULT_EXPR, i1, i2));
1260 imag = const_binop (PLUS_EXPR,
1261 const_binop (MULT_EXPR, r1, i2),
1262 const_binop (MULT_EXPR, i1, r2));
1266 if (COMPLEX_FLOAT_TYPE_P (type))
1267 return do_mpc_arg2 (arg1, arg2, type,
1268 /* do_nonfinite= */ folding_initializer,
1271 case TRUNC_DIV_EXPR:
1273 case FLOOR_DIV_EXPR:
1274 case ROUND_DIV_EXPR:
1275 if (flag_complex_method == 0)
1277 /* Keep this algorithm in sync with
1278 tree-complex.c:expand_complex_div_straight().
1280 Expand complex division to scalars, straightforward algorithm.
1281 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1285 = const_binop (PLUS_EXPR,
1286 const_binop (MULT_EXPR, r2, r2),
1287 const_binop (MULT_EXPR, i2, i2));
1289 = const_binop (PLUS_EXPR,
1290 const_binop (MULT_EXPR, r1, r2),
1291 const_binop (MULT_EXPR, i1, i2));
1293 = const_binop (MINUS_EXPR,
1294 const_binop (MULT_EXPR, i1, r2),
1295 const_binop (MULT_EXPR, r1, i2));
1297 real = const_binop (code, t1, magsquared);
1298 imag = const_binop (code, t2, magsquared);
1302 /* Keep this algorithm in sync with
1303 tree-complex.c:expand_complex_div_wide().
1305 Expand complex division to scalars, modified algorithm to minimize
1306 overflow with wide input ranges. */
1307 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1308 fold_abs_const (r2, TREE_TYPE (type)),
1309 fold_abs_const (i2, TREE_TYPE (type)));
1311 if (integer_nonzerop (compare))
1313 /* In the TRUE branch, we compute
1315 div = (br * ratio) + bi;
1316 tr = (ar * ratio) + ai;
1317 ti = (ai * ratio) - ar;
1320 tree ratio = const_binop (code, r2, i2);
1321 tree div = const_binop (PLUS_EXPR, i2,
1322 const_binop (MULT_EXPR, r2, ratio));
1323 real = const_binop (MULT_EXPR, r1, ratio);
1324 real = const_binop (PLUS_EXPR, real, i1);
1325 real = const_binop (code, real, div);
1327 imag = const_binop (MULT_EXPR, i1, ratio);
1328 imag = const_binop (MINUS_EXPR, imag, r1);
1329 imag = const_binop (code, imag, div);
1333 /* In the FALSE branch, we compute
1335 divisor = (d * ratio) + c;
1336 tr = (b * ratio) + a;
1337 ti = b - (a * ratio);
1340 tree ratio = const_binop (code, i2, r2);
1341 tree div = const_binop (PLUS_EXPR, r2,
1342 const_binop (MULT_EXPR, i2, ratio));
1344 real = const_binop (MULT_EXPR, i1, ratio);
1345 real = const_binop (PLUS_EXPR, real, r1);
1346 real = const_binop (code, real, div);
1348 imag = const_binop (MULT_EXPR, r1, ratio);
1349 imag = const_binop (MINUS_EXPR, i1, imag);
1350 imag = const_binop (code, imag, div);
1360 return build_complex (type, real, imag);
1363 if (TREE_CODE (arg1) == VECTOR_CST)
1365 tree type = TREE_TYPE(arg1);
1366 int count = TYPE_VECTOR_SUBPARTS (type), i;
1367 tree elements1, elements2, list = NULL_TREE;
1369 if(TREE_CODE(arg2) != VECTOR_CST)
1372 elements1 = TREE_VECTOR_CST_ELTS (arg1);
1373 elements2 = TREE_VECTOR_CST_ELTS (arg2);
1375 for (i = 0; i < count; i++)
1377 tree elem1, elem2, elem;
1379 /* The trailing elements can be empty and should be treated as 0 */
1381 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1384 elem1 = TREE_VALUE(elements1);
1385 elements1 = TREE_CHAIN (elements1);
1389 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1392 elem2 = TREE_VALUE(elements2);
1393 elements2 = TREE_CHAIN (elements2);
1396 elem = const_binop (code, elem1, elem2);
1398 /* It is possible that const_binop cannot handle the given
1399 code and return NULL_TREE */
1400 if(elem == NULL_TREE)
1403 list = tree_cons (NULL_TREE, elem, list);
1405 return build_vector(type, nreverse(list));
1410 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1411 indicates which particular sizetype to create. */
1414 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1416 return build_int_cst (sizetype_tab[(int) kind], number);
1419 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1420 is a tree code. The type of the result is taken from the operands.
1421 Both must be equivalent integer types, ala int_binop_types_match_p.
1422 If the operands are constant, so is the result. */
1425 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1427 tree type = TREE_TYPE (arg0);
1429 if (arg0 == error_mark_node || arg1 == error_mark_node)
1430 return error_mark_node;
1432 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1435 /* Handle the special case of two integer constants faster. */
1436 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1438 /* And some specific cases even faster than that. */
1439 if (code == PLUS_EXPR)
1441 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1443 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1446 else if (code == MINUS_EXPR)
1448 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1451 else if (code == MULT_EXPR)
1453 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1457 /* Handle general case of two integer constants. */
1458 return int_const_binop (code, arg0, arg1, 0);
1461 return fold_build2_loc (loc, code, type, arg0, arg1);
1464 /* Given two values, either both of sizetype or both of bitsizetype,
1465 compute the difference between the two values. Return the value
1466 in signed type corresponding to the type of the operands. */
1469 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1471 tree type = TREE_TYPE (arg0);
1474 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1477 /* If the type is already signed, just do the simple thing. */
1478 if (!TYPE_UNSIGNED (type))
1479 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1481 if (type == sizetype)
1483 else if (type == bitsizetype)
1484 ctype = sbitsizetype;
1486 ctype = signed_type_for (type);
1488 /* If either operand is not a constant, do the conversions to the signed
1489 type and subtract. The hardware will do the right thing with any
1490 overflow in the subtraction. */
1491 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1492 return size_binop_loc (loc, MINUS_EXPR,
1493 fold_convert_loc (loc, ctype, arg0),
1494 fold_convert_loc (loc, ctype, arg1));
1496 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1497 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1498 overflow) and negate (which can't either). Special-case a result
1499 of zero while we're here. */
1500 if (tree_int_cst_equal (arg0, arg1))
1501 return build_int_cst (ctype, 0);
1502 else if (tree_int_cst_lt (arg1, arg0))
1503 return fold_convert_loc (loc, ctype,
1504 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1506 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1507 fold_convert_loc (loc, ctype,
1508 size_binop_loc (loc,
1513 /* A subroutine of fold_convert_const handling conversions of an
1514 INTEGER_CST to another integer type. */
1517 fold_convert_const_int_from_int (tree type, const_tree arg1)
1521 /* Given an integer constant, make new constant with new type,
1522 appropriately sign-extended or truncated. */
1523 t = force_fit_type_double (type, tree_to_double_int (arg1),
1524 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1525 (TREE_INT_CST_HIGH (arg1) < 0
1526 && (TYPE_UNSIGNED (type)
1527 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1528 | TREE_OVERFLOW (arg1));
1533 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1534 to an integer type. */
1537 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1542 /* The following code implements the floating point to integer
1543 conversion rules required by the Java Language Specification,
1544 that IEEE NaNs are mapped to zero and values that overflow
1545 the target precision saturate, i.e. values greater than
1546 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1547 are mapped to INT_MIN. These semantics are allowed by the
1548 C and C++ standards that simply state that the behavior of
1549 FP-to-integer conversion is unspecified upon overflow. */
1553 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1557 case FIX_TRUNC_EXPR:
1558 real_trunc (&r, VOIDmode, &x);
1565 /* If R is NaN, return zero and show we have an overflow. */
1566 if (REAL_VALUE_ISNAN (r))
1569 val = double_int_zero;
1572 /* See if R is less than the lower bound or greater than the
1577 tree lt = TYPE_MIN_VALUE (type);
1578 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1579 if (REAL_VALUES_LESS (r, l))
1582 val = tree_to_double_int (lt);
1588 tree ut = TYPE_MAX_VALUE (type);
1591 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1592 if (REAL_VALUES_LESS (u, r))
1595 val = tree_to_double_int (ut);
1601 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1603 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1607 /* A subroutine of fold_convert_const handling conversions of a
1608 FIXED_CST to an integer type. */
1611 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1614 double_int temp, temp_trunc;
1617 /* Right shift FIXED_CST to temp by fbit. */
1618 temp = TREE_FIXED_CST (arg1).data;
1619 mode = TREE_FIXED_CST (arg1).mode;
1620 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
1622 temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
1623 HOST_BITS_PER_DOUBLE_INT,
1624 SIGNED_FIXED_POINT_MODE_P (mode));
1626 /* Left shift temp to temp_trunc by fbit. */
1627 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
1628 HOST_BITS_PER_DOUBLE_INT,
1629 SIGNED_FIXED_POINT_MODE_P (mode));
1633 temp = double_int_zero;
1634 temp_trunc = double_int_zero;
1637 /* If FIXED_CST is negative, we need to round the value toward 0.
1638 By checking if the fractional bits are not zero to add 1 to temp. */
1639 if (SIGNED_FIXED_POINT_MODE_P (mode)
1640 && double_int_negative_p (temp_trunc)
1641 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
1642 temp = double_int_add (temp, double_int_one);
1644 /* Given a fixed-point constant, make new constant with new type,
1645 appropriately sign-extended or truncated. */
1646 t = force_fit_type_double (type, temp, -1,
1647 (double_int_negative_p (temp)
1648 && (TYPE_UNSIGNED (type)
1649 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1650 | TREE_OVERFLOW (arg1));
1655 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1656 to another floating point type. */
1659 fold_convert_const_real_from_real (tree type, const_tree arg1)
1661 REAL_VALUE_TYPE value;
1664 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1665 t = build_real (type, value);
1667 /* If converting an infinity or NAN to a representation that doesn't
1668 have one, set the overflow bit so that we can produce some kind of
1669 error message at the appropriate point if necessary. It's not the
1670 most user-friendly message, but it's better than nothing. */
1671 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1672 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1673 TREE_OVERFLOW (t) = 1;
1674 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1675 && !MODE_HAS_NANS (TYPE_MODE (type)))
1676 TREE_OVERFLOW (t) = 1;
1677 /* Regular overflow, conversion produced an infinity in a mode that
1678 can't represent them. */
1679 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1680 && REAL_VALUE_ISINF (value)
1681 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1682 TREE_OVERFLOW (t) = 1;
1684 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1688 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1689 to a floating point type. */
1692 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1694 REAL_VALUE_TYPE value;
1697 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1698 t = build_real (type, value);
1700 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1704 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1705 to another fixed-point type. */
1708 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1710 FIXED_VALUE_TYPE value;
1714 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1715 TYPE_SATURATING (type));
1716 t = build_fixed (type, value);
1718 /* Propagate overflow flags. */
1719 if (overflow_p | TREE_OVERFLOW (arg1))
1720 TREE_OVERFLOW (t) = 1;
1724 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1725 to a fixed-point type. */
1728 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1730 FIXED_VALUE_TYPE value;
1734 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1735 TREE_INT_CST (arg1),
1736 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1737 TYPE_SATURATING (type));
1738 t = build_fixed (type, value);
1740 /* Propagate overflow flags. */
1741 if (overflow_p | TREE_OVERFLOW (arg1))
1742 TREE_OVERFLOW (t) = 1;
1746 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1747 to a fixed-point type. */
1750 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1752 FIXED_VALUE_TYPE value;
1756 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1757 &TREE_REAL_CST (arg1),
1758 TYPE_SATURATING (type));
1759 t = build_fixed (type, value);
1761 /* Propagate overflow flags. */
1762 if (overflow_p | TREE_OVERFLOW (arg1))
1763 TREE_OVERFLOW (t) = 1;
1767 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1768 type TYPE. If no simplification can be done return NULL_TREE. */
1771 fold_convert_const (enum tree_code code, tree type, tree arg1)
1773 if (TREE_TYPE (arg1) == type)
1776 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1777 || TREE_CODE (type) == OFFSET_TYPE)
1779 if (TREE_CODE (arg1) == INTEGER_CST)
1780 return fold_convert_const_int_from_int (type, arg1);
1781 else if (TREE_CODE (arg1) == REAL_CST)
1782 return fold_convert_const_int_from_real (code, type, arg1);
1783 else if (TREE_CODE (arg1) == FIXED_CST)
1784 return fold_convert_const_int_from_fixed (type, arg1);
1786 else if (TREE_CODE (type) == REAL_TYPE)
1788 if (TREE_CODE (arg1) == INTEGER_CST)
1789 return build_real_from_int_cst (type, arg1);
1790 else if (TREE_CODE (arg1) == REAL_CST)
1791 return fold_convert_const_real_from_real (type, arg1);
1792 else if (TREE_CODE (arg1) == FIXED_CST)
1793 return fold_convert_const_real_from_fixed (type, arg1);
1795 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1797 if (TREE_CODE (arg1) == FIXED_CST)
1798 return fold_convert_const_fixed_from_fixed (type, arg1);
1799 else if (TREE_CODE (arg1) == INTEGER_CST)
1800 return fold_convert_const_fixed_from_int (type, arg1);
1801 else if (TREE_CODE (arg1) == REAL_CST)
1802 return fold_convert_const_fixed_from_real (type, arg1);
1807 /* Construct a vector of zero elements of vector type TYPE. */
1810 build_zero_vector (tree type)
1814 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1815 return build_vector_from_val (type, t);
1818 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1821 fold_convertible_p (const_tree type, const_tree arg)
1823 tree orig = TREE_TYPE (arg);
1828 if (TREE_CODE (arg) == ERROR_MARK
1829 || TREE_CODE (type) == ERROR_MARK
1830 || TREE_CODE (orig) == ERROR_MARK)
1833 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1836 switch (TREE_CODE (type))
1838 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1839 case POINTER_TYPE: case REFERENCE_TYPE:
1841 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1842 || TREE_CODE (orig) == OFFSET_TYPE)
1844 return (TREE_CODE (orig) == VECTOR_TYPE
1845 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1848 case FIXED_POINT_TYPE:
1852 return TREE_CODE (type) == TREE_CODE (orig);
1859 /* Convert expression ARG to type TYPE. Used by the middle-end for
1860 simple conversions in preference to calling the front-end's convert. */
1863 fold_convert_loc (location_t loc, tree type, tree arg)
1865 tree orig = TREE_TYPE (arg);
1871 if (TREE_CODE (arg) == ERROR_MARK
1872 || TREE_CODE (type) == ERROR_MARK
1873 || TREE_CODE (orig) == ERROR_MARK)
1874 return error_mark_node;
1876 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1877 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1879 switch (TREE_CODE (type))
1882 case REFERENCE_TYPE:
1883 /* Handle conversions between pointers to different address spaces. */
1884 if (POINTER_TYPE_P (orig)
1885 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1886 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1887 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1890 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1892 if (TREE_CODE (arg) == INTEGER_CST)
1894 tem = fold_convert_const (NOP_EXPR, type, arg);
1895 if (tem != NULL_TREE)
1898 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1899 || TREE_CODE (orig) == OFFSET_TYPE)
1900 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1901 if (TREE_CODE (orig) == COMPLEX_TYPE)
1902 return fold_convert_loc (loc, type,
1903 fold_build1_loc (loc, REALPART_EXPR,
1904 TREE_TYPE (orig), arg));
1905 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1906 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1907 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1910 if (TREE_CODE (arg) == INTEGER_CST)
1912 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1913 if (tem != NULL_TREE)
1916 else if (TREE_CODE (arg) == REAL_CST)
1918 tem = fold_convert_const (NOP_EXPR, type, arg);
1919 if (tem != NULL_TREE)
1922 else if (TREE_CODE (arg) == FIXED_CST)
1924 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1925 if (tem != NULL_TREE)
1929 switch (TREE_CODE (orig))
1932 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1933 case POINTER_TYPE: case REFERENCE_TYPE:
1934 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1937 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1939 case FIXED_POINT_TYPE:
1940 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1943 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1944 return fold_convert_loc (loc, type, tem);
1950 case FIXED_POINT_TYPE:
1951 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1952 || TREE_CODE (arg) == REAL_CST)
1954 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1955 if (tem != NULL_TREE)
1956 goto fold_convert_exit;
1959 switch (TREE_CODE (orig))
1961 case FIXED_POINT_TYPE:
1966 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1969 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1970 return fold_convert_loc (loc, type, tem);
1977 switch (TREE_CODE (orig))
1980 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1981 case POINTER_TYPE: case REFERENCE_TYPE:
1983 case FIXED_POINT_TYPE:
1984 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1985 fold_convert_loc (loc, TREE_TYPE (type), arg),
1986 fold_convert_loc (loc, TREE_TYPE (type),
1987 integer_zero_node));
1992 if (TREE_CODE (arg) == COMPLEX_EXPR)
1994 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1995 TREE_OPERAND (arg, 0));
1996 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1997 TREE_OPERAND (arg, 1));
1998 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2001 arg = save_expr (arg);
2002 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2003 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2004 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2005 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2006 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2014 if (integer_zerop (arg))
2015 return build_zero_vector (type);
2016 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2017 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2018 || TREE_CODE (orig) == VECTOR_TYPE);
2019 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2022 tem = fold_ignored_result (arg);
2023 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2029 protected_set_expr_location_unshare (tem, loc);
2033 /* Return false if expr can be assumed not to be an lvalue, true
2037 maybe_lvalue_p (const_tree x)
2039 /* We only need to wrap lvalue tree codes. */
2040 switch (TREE_CODE (x))
2053 case ARRAY_RANGE_REF:
2059 case PREINCREMENT_EXPR:
2060 case PREDECREMENT_EXPR:
2062 case TRY_CATCH_EXPR:
2063 case WITH_CLEANUP_EXPR:
2072 /* Assume the worst for front-end tree codes. */
2073 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2081 /* Return an expr equal to X but certainly not valid as an lvalue. */
2084 non_lvalue_loc (location_t loc, tree x)
2086 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2091 if (! maybe_lvalue_p (x))
2093 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2096 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2097 Zero means allow extended lvalues. */
2099 int pedantic_lvalues;
2101 /* When pedantic, return an expr equal to X but certainly not valid as a
2102 pedantic lvalue. Otherwise, return X. */
2105 pedantic_non_lvalue_loc (location_t loc, tree x)
2107 if (pedantic_lvalues)
2108 return non_lvalue_loc (loc, x);
2110 return protected_set_expr_location_unshare (x, loc);
2113 /* Given a tree comparison code, return the code that is the logical inverse
2114 of the given code. It is not safe to do this for floating-point
2115 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2116 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2119 invert_tree_comparison (enum tree_code code, bool honor_nans)
2121 if (honor_nans && flag_trapping_math)
2131 return honor_nans ? UNLE_EXPR : LE_EXPR;
2133 return honor_nans ? UNLT_EXPR : LT_EXPR;
2135 return honor_nans ? UNGE_EXPR : GE_EXPR;
2137 return honor_nans ? UNGT_EXPR : GT_EXPR;
2151 return UNORDERED_EXPR;
2152 case UNORDERED_EXPR:
2153 return ORDERED_EXPR;
2159 /* Similar, but return the comparison that results if the operands are
2160 swapped. This is safe for floating-point. */
2163 swap_tree_comparison (enum tree_code code)
2170 case UNORDERED_EXPR:
2196 /* Convert a comparison tree code from an enum tree_code representation
2197 into a compcode bit-based encoding. This function is the inverse of
2198 compcode_to_comparison. */
2200 static enum comparison_code
2201 comparison_to_compcode (enum tree_code code)
2218 return COMPCODE_ORD;
2219 case UNORDERED_EXPR:
2220 return COMPCODE_UNORD;
2222 return COMPCODE_UNLT;
2224 return COMPCODE_UNEQ;
2226 return COMPCODE_UNLE;
2228 return COMPCODE_UNGT;
2230 return COMPCODE_LTGT;
2232 return COMPCODE_UNGE;
2238 /* Convert a compcode bit-based encoding of a comparison operator back
2239 to GCC's enum tree_code representation. This function is the
2240 inverse of comparison_to_compcode. */
2242 static enum tree_code
2243 compcode_to_comparison (enum comparison_code code)
2260 return ORDERED_EXPR;
2261 case COMPCODE_UNORD:
2262 return UNORDERED_EXPR;
2280 /* Return a tree for the comparison which is the combination of
2281 doing the AND or OR (depending on CODE) of the two operations LCODE
2282 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2283 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2284 if this makes the transformation invalid. */
2287 combine_comparisons (location_t loc,
2288 enum tree_code code, enum tree_code lcode,
2289 enum tree_code rcode, tree truth_type,
2290 tree ll_arg, tree lr_arg)
2292 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2293 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2294 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2299 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2300 compcode = lcompcode & rcompcode;
2303 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2304 compcode = lcompcode | rcompcode;
2313 /* Eliminate unordered comparisons, as well as LTGT and ORD
2314 which are not used unless the mode has NaNs. */
2315 compcode &= ~COMPCODE_UNORD;
2316 if (compcode == COMPCODE_LTGT)
2317 compcode = COMPCODE_NE;
2318 else if (compcode == COMPCODE_ORD)
2319 compcode = COMPCODE_TRUE;
2321 else if (flag_trapping_math)
2323 /* Check that the original operation and the optimized ones will trap
2324 under the same condition. */
2325 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2326 && (lcompcode != COMPCODE_EQ)
2327 && (lcompcode != COMPCODE_ORD);
2328 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2329 && (rcompcode != COMPCODE_EQ)
2330 && (rcompcode != COMPCODE_ORD);
2331 bool trap = (compcode & COMPCODE_UNORD) == 0
2332 && (compcode != COMPCODE_EQ)
2333 && (compcode != COMPCODE_ORD);
2335 /* In a short-circuited boolean expression the LHS might be
2336 such that the RHS, if evaluated, will never trap. For
2337 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2338 if neither x nor y is NaN. (This is a mixed blessing: for
2339 example, the expression above will never trap, hence
2340 optimizing it to x < y would be invalid). */
2341 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2342 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2345 /* If the comparison was short-circuited, and only the RHS
2346 trapped, we may now generate a spurious trap. */
2348 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2351 /* If we changed the conditions that cause a trap, we lose. */
2352 if ((ltrap || rtrap) != trap)
2356 if (compcode == COMPCODE_TRUE)
2357 return constant_boolean_node (true, truth_type);
2358 else if (compcode == COMPCODE_FALSE)
2359 return constant_boolean_node (false, truth_type);
2362 enum tree_code tcode;
2364 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2365 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2369 /* Return nonzero if two operands (typically of the same tree node)
2370 are necessarily equal. If either argument has side-effects this
2371 function returns zero. FLAGS modifies behavior as follows:
2373 If OEP_ONLY_CONST is set, only return nonzero for constants.
2374 This function tests whether the operands are indistinguishable;
2375 it does not test whether they are equal using C's == operation.
2376 The distinction is important for IEEE floating point, because
2377 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2378 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2380 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2381 even though it may hold multiple values during a function.
2382 This is because a GCC tree node guarantees that nothing else is
2383 executed between the evaluation of its "operands" (which may often
2384 be evaluated in arbitrary order). Hence if the operands themselves
2385 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2386 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2387 unset means assuming isochronic (or instantaneous) tree equivalence.
2388 Unless comparing arbitrary expression trees, such as from different
2389 statements, this flag can usually be left unset.
2391 If OEP_PURE_SAME is set, then pure functions with identical arguments
2392 are considered the same. It is used when the caller has other ways
2393 to ensure that global memory is unchanged in between. */
2396 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2398 /* If either is ERROR_MARK, they aren't equal. */
2399 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2400 || TREE_TYPE (arg0) == error_mark_node
2401 || TREE_TYPE (arg1) == error_mark_node)
2404 /* Similar, if either does not have a type (like a released SSA name),
2405 they aren't equal. */
2406 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2409 /* Check equality of integer constants before bailing out due to
2410 precision differences. */
2411 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2412 return tree_int_cst_equal (arg0, arg1);
2414 /* If both types don't have the same signedness, then we can't consider
2415 them equal. We must check this before the STRIP_NOPS calls
2416 because they may change the signedness of the arguments. As pointers
2417 strictly don't have a signedness, require either two pointers or
2418 two non-pointers as well. */
2419 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2420 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2423 /* We cannot consider pointers to different address space equal. */
2424 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2425 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2426 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2429 /* If both types don't have the same precision, then it is not safe
2431 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2437 /* In case both args are comparisons but with different comparison
2438 code, try to swap the comparison operands of one arg to produce
2439 a match and compare that variant. */
2440 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2441 && COMPARISON_CLASS_P (arg0)
2442 && COMPARISON_CLASS_P (arg1))
2444 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2446 if (TREE_CODE (arg0) == swap_code)
2447 return operand_equal_p (TREE_OPERAND (arg0, 0),
2448 TREE_OPERAND (arg1, 1), flags)
2449 && operand_equal_p (TREE_OPERAND (arg0, 1),
2450 TREE_OPERAND (arg1, 0), flags);
2453 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2454 /* This is needed for conversions and for COMPONENT_REF.
2455 Might as well play it safe and always test this. */
2456 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2457 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2458 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2461 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2462 We don't care about side effects in that case because the SAVE_EXPR
2463 takes care of that for us. In all other cases, two expressions are
2464 equal if they have no side effects. If we have two identical
2465 expressions with side effects that should be treated the same due
2466 to the only side effects being identical SAVE_EXPR's, that will
2467 be detected in the recursive calls below. */
2468 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2469 && (TREE_CODE (arg0) == SAVE_EXPR
2470 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2473 /* Next handle constant cases, those for which we can return 1 even
2474 if ONLY_CONST is set. */
2475 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2476 switch (TREE_CODE (arg0))
2479 return tree_int_cst_equal (arg0, arg1);
2482 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2483 TREE_FIXED_CST (arg1));
2486 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2487 TREE_REAL_CST (arg1)))
2491 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2493 /* If we do not distinguish between signed and unsigned zero,
2494 consider them equal. */
2495 if (real_zerop (arg0) && real_zerop (arg1))
2504 v1 = TREE_VECTOR_CST_ELTS (arg0);
2505 v2 = TREE_VECTOR_CST_ELTS (arg1);
2508 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2511 v1 = TREE_CHAIN (v1);
2512 v2 = TREE_CHAIN (v2);
2519 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2521 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2525 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2526 && ! memcmp (TREE_STRING_POINTER (arg0),
2527 TREE_STRING_POINTER (arg1),
2528 TREE_STRING_LENGTH (arg0)));
2531 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2537 if (flags & OEP_ONLY_CONST)
2540 /* Define macros to test an operand from arg0 and arg1 for equality and a
2541 variant that allows null and views null as being different from any
2542 non-null value. In the latter case, if either is null, the both
2543 must be; otherwise, do the normal comparison. */
2544 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2545 TREE_OPERAND (arg1, N), flags)
2547 #define OP_SAME_WITH_NULL(N) \
2548 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2549 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2551 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2554 /* Two conversions are equal only if signedness and modes match. */
2555 switch (TREE_CODE (arg0))
2558 case FIX_TRUNC_EXPR:
2559 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2560 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2570 case tcc_comparison:
2572 if (OP_SAME (0) && OP_SAME (1))
2575 /* For commutative ops, allow the other order. */
2576 return (commutative_tree_code (TREE_CODE (arg0))
2577 && operand_equal_p (TREE_OPERAND (arg0, 0),
2578 TREE_OPERAND (arg1, 1), flags)
2579 && operand_equal_p (TREE_OPERAND (arg0, 1),
2580 TREE_OPERAND (arg1, 0), flags));
2583 /* If either of the pointer (or reference) expressions we are
2584 dereferencing contain a side effect, these cannot be equal. */
2585 if (TREE_SIDE_EFFECTS (arg0)
2586 || TREE_SIDE_EFFECTS (arg1))
2589 switch (TREE_CODE (arg0))
2597 /* Require equal access sizes, and similar pointer types.
2598 We can have incomplete types for array references of
2599 variable-sized arrays from the Fortran frontent
2601 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2602 || (TYPE_SIZE (TREE_TYPE (arg0))
2603 && TYPE_SIZE (TREE_TYPE (arg1))
2604 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2605 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2606 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2607 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2608 && OP_SAME (0) && OP_SAME (1));
2611 case ARRAY_RANGE_REF:
2612 /* Operands 2 and 3 may be null.
2613 Compare the array index by value if it is constant first as we
2614 may have different types but same value here. */
2616 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2617 TREE_OPERAND (arg1, 1))
2619 && OP_SAME_WITH_NULL (2)
2620 && OP_SAME_WITH_NULL (3));
2623 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2624 may be NULL when we're called to compare MEM_EXPRs. */
2625 return OP_SAME_WITH_NULL (0)
2627 && OP_SAME_WITH_NULL (2);
2630 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2636 case tcc_expression:
2637 switch (TREE_CODE (arg0))
2640 case TRUTH_NOT_EXPR:
2643 case TRUTH_ANDIF_EXPR:
2644 case TRUTH_ORIF_EXPR:
2645 return OP_SAME (0) && OP_SAME (1);
2648 case WIDEN_MULT_PLUS_EXPR:
2649 case WIDEN_MULT_MINUS_EXPR:
2652 /* The multiplcation operands are commutative. */
2655 case TRUTH_AND_EXPR:
2657 case TRUTH_XOR_EXPR:
2658 if (OP_SAME (0) && OP_SAME (1))
2661 /* Otherwise take into account this is a commutative operation. */
2662 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2663 TREE_OPERAND (arg1, 1), flags)
2664 && operand_equal_p (TREE_OPERAND (arg0, 1),
2665 TREE_OPERAND (arg1, 0), flags));
2670 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2677 switch (TREE_CODE (arg0))
2680 /* If the CALL_EXPRs call different functions, then they
2681 clearly can not be equal. */
2682 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2687 unsigned int cef = call_expr_flags (arg0);
2688 if (flags & OEP_PURE_SAME)
2689 cef &= ECF_CONST | ECF_PURE;
2696 /* Now see if all the arguments are the same. */
2698 const_call_expr_arg_iterator iter0, iter1;
2700 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2701 a1 = first_const_call_expr_arg (arg1, &iter1);
2703 a0 = next_const_call_expr_arg (&iter0),
2704 a1 = next_const_call_expr_arg (&iter1))
2705 if (! operand_equal_p (a0, a1, flags))
2708 /* If we get here and both argument lists are exhausted
2709 then the CALL_EXPRs are equal. */
2710 return ! (a0 || a1);
2716 case tcc_declaration:
2717 /* Consider __builtin_sqrt equal to sqrt. */
2718 return (TREE_CODE (arg0) == FUNCTION_DECL
2719 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2720 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2721 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2728 #undef OP_SAME_WITH_NULL
2731 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2732 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2734 When in doubt, return 0. */
2737 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2739 int unsignedp1, unsignedpo;
2740 tree primarg0, primarg1, primother;
2741 unsigned int correct_width;
2743 if (operand_equal_p (arg0, arg1, 0))
2746 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2747 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2750 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2751 and see if the inner values are the same. This removes any
2752 signedness comparison, which doesn't matter here. */
2753 primarg0 = arg0, primarg1 = arg1;
2754 STRIP_NOPS (primarg0);
2755 STRIP_NOPS (primarg1);
2756 if (operand_equal_p (primarg0, primarg1, 0))
2759 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2760 actual comparison operand, ARG0.
2762 First throw away any conversions to wider types
2763 already present in the operands. */
2765 primarg1 = get_narrower (arg1, &unsignedp1);
2766 primother = get_narrower (other, &unsignedpo);
2768 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2769 if (unsignedp1 == unsignedpo
2770 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2771 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2773 tree type = TREE_TYPE (arg0);
2775 /* Make sure shorter operand is extended the right way
2776 to match the longer operand. */
2777 primarg1 = fold_convert (signed_or_unsigned_type_for
2778 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2780 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2787 /* See if ARG is an expression that is either a comparison or is performing
2788 arithmetic on comparisons. The comparisons must only be comparing
2789 two different values, which will be stored in *CVAL1 and *CVAL2; if
2790 they are nonzero it means that some operands have already been found.
2791 No variables may be used anywhere else in the expression except in the
2792 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2793 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2795 If this is true, return 1. Otherwise, return zero. */
2798 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2800 enum tree_code code = TREE_CODE (arg);
2801 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2803 /* We can handle some of the tcc_expression cases here. */
2804 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2806 else if (tclass == tcc_expression
2807 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2808 || code == COMPOUND_EXPR))
2809 tclass = tcc_binary;
2811 else if (tclass == tcc_expression && code == SAVE_EXPR
2812 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2814 /* If we've already found a CVAL1 or CVAL2, this expression is
2815 two complex to handle. */
2816 if (*cval1 || *cval2)
2826 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2829 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2830 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2831 cval1, cval2, save_p));
2836 case tcc_expression:
2837 if (code == COND_EXPR)
2838 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2839 cval1, cval2, save_p)
2840 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2841 cval1, cval2, save_p)
2842 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2843 cval1, cval2, save_p));
2846 case tcc_comparison:
2847 /* First see if we can handle the first operand, then the second. For
2848 the second operand, we know *CVAL1 can't be zero. It must be that
2849 one side of the comparison is each of the values; test for the
2850 case where this isn't true by failing if the two operands
2853 if (operand_equal_p (TREE_OPERAND (arg, 0),
2854 TREE_OPERAND (arg, 1), 0))
2858 *cval1 = TREE_OPERAND (arg, 0);
2859 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2861 else if (*cval2 == 0)
2862 *cval2 = TREE_OPERAND (arg, 0);
2863 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2868 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2870 else if (*cval2 == 0)
2871 *cval2 = TREE_OPERAND (arg, 1);
2872 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2884 /* ARG is a tree that is known to contain just arithmetic operations and
2885 comparisons. Evaluate the operations in the tree substituting NEW0 for
2886 any occurrence of OLD0 as an operand of a comparison and likewise for
2890 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2891 tree old1, tree new1)
2893 tree type = TREE_TYPE (arg);
2894 enum tree_code code = TREE_CODE (arg);
2895 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2897 /* We can handle some of the tcc_expression cases here. */
2898 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2900 else if (tclass == tcc_expression
2901 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2902 tclass = tcc_binary;
2907 return fold_build1_loc (loc, code, type,
2908 eval_subst (loc, TREE_OPERAND (arg, 0),
2909 old0, new0, old1, new1));
2912 return fold_build2_loc (loc, code, type,
2913 eval_subst (loc, TREE_OPERAND (arg, 0),
2914 old0, new0, old1, new1),
2915 eval_subst (loc, TREE_OPERAND (arg, 1),
2916 old0, new0, old1, new1));
2918 case tcc_expression:
2922 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2926 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2930 return fold_build3_loc (loc, code, type,
2931 eval_subst (loc, TREE_OPERAND (arg, 0),
2932 old0, new0, old1, new1),
2933 eval_subst (loc, TREE_OPERAND (arg, 1),
2934 old0, new0, old1, new1),
2935 eval_subst (loc, TREE_OPERAND (arg, 2),
2936 old0, new0, old1, new1));
2940 /* Fall through - ??? */
2942 case tcc_comparison:
2944 tree arg0 = TREE_OPERAND (arg, 0);
2945 tree arg1 = TREE_OPERAND (arg, 1);
2947 /* We need to check both for exact equality and tree equality. The
2948 former will be true if the operand has a side-effect. In that
2949 case, we know the operand occurred exactly once. */
2951 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2953 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2956 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2958 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2961 return fold_build2_loc (loc, code, type, arg0, arg1);
2969 /* Return a tree for the case when the result of an expression is RESULT
2970 converted to TYPE and OMITTED was previously an operand of the expression
2971 but is now not needed (e.g., we folded OMITTED * 0).
2973 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2974 the conversion of RESULT to TYPE. */
2977 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2979 tree t = fold_convert_loc (loc, type, result);
2981 /* If the resulting operand is an empty statement, just return the omitted
2982 statement casted to void. */
2983 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2984 return build1_loc (loc, NOP_EXPR, void_type_node,
2985 fold_ignored_result (omitted));
2987 if (TREE_SIDE_EFFECTS (omitted))
2988 return build2_loc (loc, COMPOUND_EXPR, type,
2989 fold_ignored_result (omitted), t);
2991 return non_lvalue_loc (loc, t);
2994 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2997 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3000 tree t = fold_convert_loc (loc, type, result);
3002 /* If the resulting operand is an empty statement, just return the omitted
3003 statement casted to void. */
3004 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3005 return build1_loc (loc, NOP_EXPR, void_type_node,
3006 fold_ignored_result (omitted));
3008 if (TREE_SIDE_EFFECTS (omitted))
3009 return build2_loc (loc, COMPOUND_EXPR, type,
3010 fold_ignored_result (omitted), t);
3012 return pedantic_non_lvalue_loc (loc, t);
3015 /* Return a tree for the case when the result of an expression is RESULT
3016 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3017 of the expression but are now not needed.
3019 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3020 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3021 evaluated before OMITTED2. Otherwise, if neither has side effects,
3022 just do the conversion of RESULT to TYPE. */
3025 omit_two_operands_loc (location_t loc, tree type, tree result,
3026 tree omitted1, tree omitted2)
3028 tree t = fold_convert_loc (loc, type, result);
3030 if (TREE_SIDE_EFFECTS (omitted2))
3031 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3032 if (TREE_SIDE_EFFECTS (omitted1))
3033 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3035 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3039 /* Return a simplified tree node for the truth-negation of ARG. This
3040 never alters ARG itself. We assume that ARG is an operation that
3041 returns a truth value (0 or 1).
3043 FIXME: one would think we would fold the result, but it causes
3044 problems with the dominator optimizer. */
3047 fold_truth_not_expr (location_t loc, tree arg)
3049 tree type = TREE_TYPE (arg);
3050 enum tree_code code = TREE_CODE (arg);
3051 location_t loc1, loc2;
3053 /* If this is a comparison, we can simply invert it, except for
3054 floating-point non-equality comparisons, in which case we just
3055 enclose a TRUTH_NOT_EXPR around what we have. */
3057 if (TREE_CODE_CLASS (code) == tcc_comparison)
3059 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3060 if (FLOAT_TYPE_P (op_type)
3061 && flag_trapping_math
3062 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3063 && code != NE_EXPR && code != EQ_EXPR)
3066 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3067 if (code == ERROR_MARK)
3070 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3071 TREE_OPERAND (arg, 1));
3077 return constant_boolean_node (integer_zerop (arg), type);
3079 case TRUTH_AND_EXPR:
3080 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3081 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3082 if (loc1 == UNKNOWN_LOCATION)
3084 if (loc2 == UNKNOWN_LOCATION)
3086 return build2_loc (loc, TRUTH_OR_EXPR, type,
3087 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3088 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3091 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3092 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3093 if (loc1 == UNKNOWN_LOCATION)
3095 if (loc2 == UNKNOWN_LOCATION)
3097 return build2_loc (loc, TRUTH_AND_EXPR, type,
3098 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3099 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3101 case TRUTH_XOR_EXPR:
3102 /* Here we can invert either operand. We invert the first operand
3103 unless the second operand is a TRUTH_NOT_EXPR in which case our
3104 result is the XOR of the first operand with the inside of the
3105 negation of the second operand. */
3107 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3108 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3109 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3111 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3112 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3113 TREE_OPERAND (arg, 1));
3115 case TRUTH_ANDIF_EXPR:
3116 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3117 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3118 if (loc1 == UNKNOWN_LOCATION)
3120 if (loc2 == UNKNOWN_LOCATION)
3122 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3123 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3124 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3126 case TRUTH_ORIF_EXPR:
3127 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3128 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3129 if (loc1 == UNKNOWN_LOCATION)
3131 if (loc2 == UNKNOWN_LOCATION)
3133 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3134 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3135 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3137 case TRUTH_NOT_EXPR:
3138 return TREE_OPERAND (arg, 0);
3142 tree arg1 = TREE_OPERAND (arg, 1);
3143 tree arg2 = TREE_OPERAND (arg, 2);
3145 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3146 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3147 if (loc1 == UNKNOWN_LOCATION)
3149 if (loc2 == UNKNOWN_LOCATION)
3152 /* A COND_EXPR may have a throw as one operand, which
3153 then has void type. Just leave void operands
3155 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3156 VOID_TYPE_P (TREE_TYPE (arg1))
3157 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3158 VOID_TYPE_P (TREE_TYPE (arg2))
3159 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3163 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3164 if (loc1 == UNKNOWN_LOCATION)
3166 return build2_loc (loc, COMPOUND_EXPR, type,
3167 TREE_OPERAND (arg, 0),
3168 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3170 case NON_LVALUE_EXPR:
3171 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3172 if (loc1 == UNKNOWN_LOCATION)
3174 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3177 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3178 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3180 /* ... fall through ... */
3183 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3184 if (loc1 == UNKNOWN_LOCATION)
3186 return build1_loc (loc, TREE_CODE (arg), type,
3187 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3190 if (!integer_onep (TREE_OPERAND (arg, 1)))
3192 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3195 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3197 case CLEANUP_POINT_EXPR:
3198 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3199 if (loc1 == UNKNOWN_LOCATION)
3201 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3202 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3209 /* Return a simplified tree node for the truth-negation of ARG. This
3210 never alters ARG itself. We assume that ARG is an operation that
3211 returns a truth value (0 or 1).
3213 FIXME: one would think we would fold the result, but it causes
3214 problems with the dominator optimizer. */
3217 invert_truthvalue_loc (location_t loc, tree arg)
3221 if (TREE_CODE (arg) == ERROR_MARK)
3224 tem = fold_truth_not_expr (loc, arg);
3226 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3231 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3232 operands are another bit-wise operation with a common input. If so,
3233 distribute the bit operations to save an operation and possibly two if
3234 constants are involved. For example, convert
3235 (A | B) & (A | C) into A | (B & C)
3236 Further simplification will occur if B and C are constants.
3238 If this optimization cannot be done, 0 will be returned. */
3241 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3242 tree arg0, tree arg1)
3247 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3248 || TREE_CODE (arg0) == code
3249 || (TREE_CODE (arg0) != BIT_AND_EXPR
3250 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3253 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3255 common = TREE_OPERAND (arg0, 0);
3256 left = TREE_OPERAND (arg0, 1);
3257 right = TREE_OPERAND (arg1, 1);
3259 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3261 common = TREE_OPERAND (arg0, 0);
3262 left = TREE_OPERAND (arg0, 1);
3263 right = TREE_OPERAND (arg1, 0);
3265 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3267 common = TREE_OPERAND (arg0, 1);
3268 left = TREE_OPERAND (arg0, 0);
3269 right = TREE_OPERAND (arg1, 1);
3271 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3273 common = TREE_OPERAND (arg0, 1);
3274 left = TREE_OPERAND (arg0, 0);
3275 right = TREE_OPERAND (arg1, 0);
3280 common = fold_convert_loc (loc, type, common);
3281 left = fold_convert_loc (loc, type, left);
3282 right = fold_convert_loc (loc, type, right);
3283 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3284 fold_build2_loc (loc, code, type, left, right));
3287 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3288 with code CODE. This optimization is unsafe. */
3290 distribute_real_division (location_t loc, enum tree_code code, tree type,
3291 tree arg0, tree arg1)
3293 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3294 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3296 /* (A / C) +- (B / C) -> (A +- B) / C. */
3298 && operand_equal_p (TREE_OPERAND (arg0, 1),
3299 TREE_OPERAND (arg1, 1), 0))
3300 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3301 fold_build2_loc (loc, code, type,
3302 TREE_OPERAND (arg0, 0),
3303 TREE_OPERAND (arg1, 0)),
3304 TREE_OPERAND (arg0, 1));
3306 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3307 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3308 TREE_OPERAND (arg1, 0), 0)
3309 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3310 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3312 REAL_VALUE_TYPE r0, r1;
3313 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3314 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3316 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3318 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3319 real_arithmetic (&r0, code, &r0, &r1);
3320 return fold_build2_loc (loc, MULT_EXPR, type,
3321 TREE_OPERAND (arg0, 0),
3322 build_real (type, r0));
3328 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3329 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3332 make_bit_field_ref (location_t loc, tree inner, tree type,
3333 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3335 tree result, bftype;
3339 tree size = TYPE_SIZE (TREE_TYPE (inner));
3340 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3341 || POINTER_TYPE_P (TREE_TYPE (inner)))
3342 && host_integerp (size, 0)
3343 && tree_low_cst (size, 0) == bitsize)
3344 return fold_convert_loc (loc, type, inner);
3348 if (TYPE_PRECISION (bftype) != bitsize
3349 || TYPE_UNSIGNED (bftype) == !unsignedp)
3350 bftype = build_nonstandard_integer_type (bitsize, 0);
3352 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3353 size_int (bitsize), bitsize_int (bitpos));
3356 result = fold_convert_loc (loc, type, result);
3361 /* Optimize a bit-field compare.
3363 There are two cases: First is a compare against a constant and the
3364 second is a comparison of two items where the fields are at the same
3365 bit position relative to the start of a chunk (byte, halfword, word)
3366 large enough to contain it. In these cases we can avoid the shift
3367 implicit in bitfield extractions.
3369 For constants, we emit a compare of the shifted constant with the
3370 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3371 compared. For two fields at the same position, we do the ANDs with the
3372 similar mask and compare the result of the ANDs.
3374 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3375 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3376 are the left and right operands of the comparison, respectively.
3378 If the optimization described above can be done, we return the resulting
3379 tree. Otherwise we return zero. */
3382 optimize_bit_field_compare (location_t loc, enum tree_code code,
3383 tree compare_type, tree lhs, tree rhs)
3385 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3386 tree type = TREE_TYPE (lhs);
3387 tree signed_type, unsigned_type;
3388 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3389 enum machine_mode lmode, rmode, nmode;
3390 int lunsignedp, runsignedp;
3391 int lvolatilep = 0, rvolatilep = 0;
3392 tree linner, rinner = NULL_TREE;
3396 /* Get all the information about the extractions being done. If the bit size
3397 if the same as the size of the underlying object, we aren't doing an
3398 extraction at all and so can do nothing. We also don't want to
3399 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3400 then will no longer be able to replace it. */
3401 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3402 &lunsignedp, &lvolatilep, false);
3403 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3404 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3409 /* If this is not a constant, we can only do something if bit positions,
3410 sizes, and signedness are the same. */
3411 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3412 &runsignedp, &rvolatilep, false);
3414 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3415 || lunsignedp != runsignedp || offset != 0
3416 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3420 /* See if we can find a mode to refer to this field. We should be able to,
3421 but fail if we can't. */
3423 && GET_MODE_BITSIZE (lmode) > 0
3424 && flag_strict_volatile_bitfields > 0)
3427 nmode = get_best_mode (lbitsize, lbitpos,
3428 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3429 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3430 TYPE_ALIGN (TREE_TYPE (rinner))),
3431 word_mode, lvolatilep || rvolatilep);
3432 if (nmode == VOIDmode)
3435 /* Set signed and unsigned types of the precision of this mode for the
3437 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3438 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3440 /* Compute the bit position and size for the new reference and our offset
3441 within it. If the new reference is the same size as the original, we
3442 won't optimize anything, so return zero. */
3443 nbitsize = GET_MODE_BITSIZE (nmode);
3444 nbitpos = lbitpos & ~ (nbitsize - 1);
3446 if (nbitsize == lbitsize)
3449 if (BYTES_BIG_ENDIAN)
3450 lbitpos = nbitsize - lbitsize - lbitpos;
3452 /* Make the mask to be used against the extracted field. */
3453 mask = build_int_cst_type (unsigned_type, -1);
3454 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3455 mask = const_binop (RSHIFT_EXPR, mask,
3456 size_int (nbitsize - lbitsize - lbitpos));
3459 /* If not comparing with constant, just rework the comparison
3461 return fold_build2_loc (loc, code, compare_type,
3462 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3463 make_bit_field_ref (loc, linner,
3468 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3469 make_bit_field_ref (loc, rinner,
3475 /* Otherwise, we are handling the constant case. See if the constant is too
3476 big for the field. Warn and return a tree of for 0 (false) if so. We do
3477 this not only for its own sake, but to avoid having to test for this
3478 error case below. If we didn't, we might generate wrong code.
3480 For unsigned fields, the constant shifted right by the field length should
3481 be all zero. For signed fields, the high-order bits should agree with
3486 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3487 fold_convert_loc (loc,
3488 unsigned_type, rhs),
3489 size_int (lbitsize))))
3491 warning (0, "comparison is always %d due to width of bit-field",
3493 return constant_boolean_node (code == NE_EXPR, compare_type);
3498 tree tem = const_binop (RSHIFT_EXPR,
3499 fold_convert_loc (loc, signed_type, rhs),
3500 size_int (lbitsize - 1));
3501 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3503 warning (0, "comparison is always %d due to width of bit-field",
3505 return constant_boolean_node (code == NE_EXPR, compare_type);
3509 /* Single-bit compares should always be against zero. */
3510 if (lbitsize == 1 && ! integer_zerop (rhs))
3512 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3513 rhs = build_int_cst (type, 0);
3516 /* Make a new bitfield reference, shift the constant over the
3517 appropriate number of bits and mask it with the computed mask
3518 (in case this was a signed field). If we changed it, make a new one. */
3519 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3522 TREE_SIDE_EFFECTS (lhs) = 1;
3523 TREE_THIS_VOLATILE (lhs) = 1;
3526 rhs = const_binop (BIT_AND_EXPR,
3527 const_binop (LSHIFT_EXPR,
3528 fold_convert_loc (loc, unsigned_type, rhs),
3529 size_int (lbitpos)),
3532 lhs = build2_loc (loc, code, compare_type,
3533 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3537 /* Subroutine for fold_truthop: decode a field reference.
3539 If EXP is a comparison reference, we return the innermost reference.
3541 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3542 set to the starting bit number.
3544 If the innermost field can be completely contained in a mode-sized
3545 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3547 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3548 otherwise it is not changed.
3550 *PUNSIGNEDP is set to the signedness of the field.
3552 *PMASK is set to the mask used. This is either contained in a
3553 BIT_AND_EXPR or derived from the width of the field.
3555 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3557 Return 0 if this is not a component reference or is one that we can't
3558 do anything with. */
3561 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3562 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3563 int *punsignedp, int *pvolatilep,
3564 tree *pmask, tree *pand_mask)
3566 tree outer_type = 0;
3568 tree mask, inner, offset;
3570 unsigned int precision;
3572 /* All the optimizations using this function assume integer fields.
3573 There are problems with FP fields since the type_for_size call
3574 below can fail for, e.g., XFmode. */
3575 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3578 /* We are interested in the bare arrangement of bits, so strip everything
3579 that doesn't affect the machine mode. However, record the type of the
3580 outermost expression if it may matter below. */
3581 if (CONVERT_EXPR_P (exp)
3582 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3583 outer_type = TREE_TYPE (exp);
3586 if (TREE_CODE (exp) == BIT_AND_EXPR)
3588 and_mask = TREE_OPERAND (exp, 1);
3589 exp = TREE_OPERAND (exp, 0);
3590 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3591 if (TREE_CODE (and_mask) != INTEGER_CST)
3595 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3596 punsignedp, pvolatilep, false);
3597 if ((inner == exp && and_mask == 0)
3598 || *pbitsize < 0 || offset != 0
3599 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3602 /* If the number of bits in the reference is the same as the bitsize of
3603 the outer type, then the outer type gives the signedness. Otherwise
3604 (in case of a small bitfield) the signedness is unchanged. */
3605 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3606 *punsignedp = TYPE_UNSIGNED (outer_type);
3608 /* Compute the mask to access the bitfield. */
3609 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3610 precision = TYPE_PRECISION (unsigned_type);
3612 mask = build_int_cst_type (unsigned_type, -1);
3614 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3615 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3617 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3619 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3620 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3623 *pand_mask = and_mask;
3627 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3631 all_ones_mask_p (const_tree mask, int size)
3633 tree type = TREE_TYPE (mask);
3634 unsigned int precision = TYPE_PRECISION (type);
3637 tmask = build_int_cst_type (signed_type_for (type), -1);
3640 tree_int_cst_equal (mask,
3641 const_binop (RSHIFT_EXPR,
3642 const_binop (LSHIFT_EXPR, tmask,
3643 size_int (precision - size)),
3644 size_int (precision - size)));
3647 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3648 represents the sign bit of EXP's type. If EXP represents a sign
3649 or zero extension, also test VAL against the unextended type.
3650 The return value is the (sub)expression whose sign bit is VAL,
3651 or NULL_TREE otherwise. */
3654 sign_bit_p (tree exp, const_tree val)
3656 unsigned HOST_WIDE_INT mask_lo, lo;
3657 HOST_WIDE_INT mask_hi, hi;
3661 /* Tree EXP must have an integral type. */
3662 t = TREE_TYPE (exp);
3663 if (! INTEGRAL_TYPE_P (t))
3666 /* Tree VAL must be an integer constant. */
3667 if (TREE_CODE (val) != INTEGER_CST
3668 || TREE_OVERFLOW (val))
3671 width = TYPE_PRECISION (t);
3672 if (width > HOST_BITS_PER_WIDE_INT)
3674 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3677 mask_hi = ((unsigned HOST_WIDE_INT) -1
3678 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3684 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3687 mask_lo = ((unsigned HOST_WIDE_INT) -1
3688 >> (HOST_BITS_PER_WIDE_INT - width));
3691 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3692 treat VAL as if it were unsigned. */
3693 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3694 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3697 /* Handle extension from a narrower type. */
3698 if (TREE_CODE (exp) == NOP_EXPR
3699 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3700 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3705 /* Subroutine for fold_truthop: determine if an operand is simple enough
3706 to be evaluated unconditionally. */
3709 simple_operand_p (const_tree exp)
3711 /* Strip any conversions that don't change the machine mode. */
3714 return (CONSTANT_CLASS_P (exp)
3715 || TREE_CODE (exp) == SSA_NAME
3717 && ! TREE_ADDRESSABLE (exp)
3718 && ! TREE_THIS_VOLATILE (exp)
3719 && ! DECL_NONLOCAL (exp)
3720 /* Don't regard global variables as simple. They may be
3721 allocated in ways unknown to the compiler (shared memory,
3722 #pragma weak, etc). */
3723 && ! TREE_PUBLIC (exp)
3724 && ! DECL_EXTERNAL (exp)
3725 /* Loading a static variable is unduly expensive, but global
3726 registers aren't expensive. */
3727 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3730 /* The following functions are subroutines to fold_range_test and allow it to
3731 try to change a logical combination of comparisons into a range test.
3734 X == 2 || X == 3 || X == 4 || X == 5
3738 (unsigned) (X - 2) <= 3
3740 We describe each set of comparisons as being either inside or outside
3741 a range, using a variable named like IN_P, and then describe the
3742 range with a lower and upper bound. If one of the bounds is omitted,
3743 it represents either the highest or lowest value of the type.
3745 In the comments below, we represent a range by two numbers in brackets
3746 preceded by a "+" to designate being inside that range, or a "-" to
3747 designate being outside that range, so the condition can be inverted by
3748 flipping the prefix. An omitted bound is represented by a "-". For
3749 example, "- [-, 10]" means being outside the range starting at the lowest
3750 possible value and ending at 10, in other words, being greater than 10.
3751 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3754 We set up things so that the missing bounds are handled in a consistent
3755 manner so neither a missing bound nor "true" and "false" need to be
3756 handled using a special case. */
3758 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3759 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3760 and UPPER1_P are nonzero if the respective argument is an upper bound
3761 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3762 must be specified for a comparison. ARG1 will be converted to ARG0's
3763 type if both are specified. */
3766 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3767 tree arg1, int upper1_p)
3773 /* If neither arg represents infinity, do the normal operation.
3774 Else, if not a comparison, return infinity. Else handle the special
3775 comparison rules. Note that most of the cases below won't occur, but
3776 are handled for consistency. */
3778 if (arg0 != 0 && arg1 != 0)
3780 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3781 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3783 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3786 if (TREE_CODE_CLASS (code) != tcc_comparison)
3789 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3790 for neither. In real maths, we cannot assume open ended ranges are
3791 the same. But, this is computer arithmetic, where numbers are finite.
3792 We can therefore make the transformation of any unbounded range with
3793 the value Z, Z being greater than any representable number. This permits
3794 us to treat unbounded ranges as equal. */
3795 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3796 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3800 result = sgn0 == sgn1;
3803 result = sgn0 != sgn1;
3806 result = sgn0 < sgn1;
3809 result = sgn0 <= sgn1;
3812 result = sgn0 > sgn1;
3815 result = sgn0 >= sgn1;
3821 return constant_boolean_node (result, type);
3824 /* Given EXP, a logical expression, set the range it is testing into
3825 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3826 actually being tested. *PLOW and *PHIGH will be made of the same
3827 type as the returned expression. If EXP is not a comparison, we
3828 will most likely not be returning a useful value and range. Set
3829 *STRICT_OVERFLOW_P to true if the return value is only valid
3830 because signed overflow is undefined; otherwise, do not change
3831 *STRICT_OVERFLOW_P. */
3834 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3835 bool *strict_overflow_p)
3837 enum tree_code code;
3838 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3839 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3841 tree low, high, n_low, n_high;
3842 location_t loc = EXPR_LOCATION (exp);
3844 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3845 and see if we can refine the range. Some of the cases below may not
3846 happen, but it doesn't seem worth worrying about this. We "continue"
3847 the outer loop when we've changed something; otherwise we "break"
3848 the switch, which will "break" the while. */
3851 low = high = build_int_cst (TREE_TYPE (exp), 0);
3855 code = TREE_CODE (exp);
3856 exp_type = TREE_TYPE (exp);
3858 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3860 if (TREE_OPERAND_LENGTH (exp) > 0)
3861 arg0 = TREE_OPERAND (exp, 0);
3862 if (TREE_CODE_CLASS (code) == tcc_comparison
3863 || TREE_CODE_CLASS (code) == tcc_unary
3864 || TREE_CODE_CLASS (code) == tcc_binary)
3865 arg0_type = TREE_TYPE (arg0);
3866 if (TREE_CODE_CLASS (code) == tcc_binary
3867 || TREE_CODE_CLASS (code) == tcc_comparison
3868 || (TREE_CODE_CLASS (code) == tcc_expression
3869 && TREE_OPERAND_LENGTH (exp) > 1))
3870 arg1 = TREE_OPERAND (exp, 1);
3875 case TRUTH_NOT_EXPR:
3876 /* We can only do something if the range is testing for zero. */
3877 if (low == NULL_TREE || high == NULL_TREE
3878 || ! integer_zerop (low) || ! integer_zerop (high))
3880 in_p = ! in_p, exp = arg0;
3883 case EQ_EXPR: case NE_EXPR:
3884 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3885 /* We can only do something if the range is testing for zero
3886 and if the second operand is an integer constant. Note that
3887 saying something is "in" the range we make is done by
3888 complementing IN_P since it will set in the initial case of
3889 being not equal to zero; "out" is leaving it alone. */
3890 if (low == 0 || high == 0
3891 || ! integer_zerop (low) || ! integer_zerop (high)
3892 || TREE_CODE (arg1) != INTEGER_CST)
3897 case NE_EXPR: /* - [c, c] */
3900 case EQ_EXPR: /* + [c, c] */
3901 in_p = ! in_p, low = high = arg1;
3903 case GT_EXPR: /* - [-, c] */
3904 low = 0, high = arg1;
3906 case GE_EXPR: /* + [c, -] */
3907 in_p = ! in_p, low = arg1, high = 0;
3909 case LT_EXPR: /* - [c, -] */
3910 low = arg1, high = 0;
3912 case LE_EXPR: /* + [-, c] */
3913 in_p = ! in_p, low = 0, high = arg1;
3919 /* If this is an unsigned comparison, we also know that EXP is
3920 greater than or equal to zero. We base the range tests we make
3921 on that fact, so we record it here so we can parse existing
3922 range tests. We test arg0_type since often the return type
3923 of, e.g. EQ_EXPR, is boolean. */
3924 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3926 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3928 build_int_cst (arg0_type, 0),
3932 in_p = n_in_p, low = n_low, high = n_high;
3934 /* If the high bound is missing, but we have a nonzero low
3935 bound, reverse the range so it goes from zero to the low bound
3937 if (high == 0 && low && ! integer_zerop (low))
3940 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3941 integer_one_node, 0);
3942 low = build_int_cst (arg0_type, 0);
3950 /* (-x) IN [a,b] -> x in [-b, -a] */
3951 n_low = range_binop (MINUS_EXPR, exp_type,
3952 build_int_cst (exp_type, 0),
3954 n_high = range_binop (MINUS_EXPR, exp_type,
3955 build_int_cst (exp_type, 0),
3957 if (n_high != 0 && TREE_OVERFLOW (n_high))
3963 exp = build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3964 build_int_cst (exp_type, 1));
3967 case PLUS_EXPR: case MINUS_EXPR:
3968 if (TREE_CODE (arg1) != INTEGER_CST)
3971 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3972 move a constant to the other side. */
3973 if (!TYPE_UNSIGNED (arg0_type)
3974 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3977 /* If EXP is signed, any overflow in the computation is undefined,
3978 so we don't worry about it so long as our computations on
3979 the bounds don't overflow. For unsigned, overflow is defined
3980 and this is exactly the right thing. */
3981 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3982 arg0_type, low, 0, arg1, 0);
3983 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3984 arg0_type, high, 1, arg1, 0);
3985 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3986 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3989 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3990 *strict_overflow_p = true;
3993 /* Check for an unsigned range which has wrapped around the maximum
3994 value thus making n_high < n_low, and normalize it. */
3995 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3997 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3998 integer_one_node, 0);
3999 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4000 integer_one_node, 0);
4002 /* If the range is of the form +/- [ x+1, x ], we won't
4003 be able to normalize it. But then, it represents the
4004 whole range or the empty set, so make it
4006 if (tree_int_cst_equal (n_low, low)
4007 && tree_int_cst_equal (n_high, high))
4013 low = n_low, high = n_high;
4018 CASE_CONVERT: case NON_LVALUE_EXPR:
4019 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4022 if (! INTEGRAL_TYPE_P (arg0_type)
4023 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4024 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4027 n_low = low, n_high = high;
4030 n_low = fold_convert_loc (loc, arg0_type, n_low);
4033 n_high = fold_convert_loc (loc, arg0_type, n_high);
4036 /* If we're converting arg0 from an unsigned type, to exp,
4037 a signed type, we will be doing the comparison as unsigned.
4038 The tests above have already verified that LOW and HIGH
4041 So we have to ensure that we will handle large unsigned
4042 values the same way that the current signed bounds treat
4045 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4049 /* For fixed-point modes, we need to pass the saturating flag
4050 as the 2nd parameter. */
4051 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4052 equiv_type = lang_hooks.types.type_for_mode
4053 (TYPE_MODE (arg0_type),
4054 TYPE_SATURATING (arg0_type));
4056 equiv_type = lang_hooks.types.type_for_mode
4057 (TYPE_MODE (arg0_type), 1);
4059 /* A range without an upper bound is, naturally, unbounded.
4060 Since convert would have cropped a very large value, use
4061 the max value for the destination type. */
4063 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4064 : TYPE_MAX_VALUE (arg0_type);
4066 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4067 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4068 fold_convert_loc (loc, arg0_type,
4070 build_int_cst (arg0_type, 1));
4072 /* If the low bound is specified, "and" the range with the
4073 range for which the original unsigned value will be
4077 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4078 1, n_low, n_high, 1,
4079 fold_convert_loc (loc, arg0_type,
4084 in_p = (n_in_p == in_p);
4088 /* Otherwise, "or" the range with the range of the input
4089 that will be interpreted as negative. */
4090 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4091 0, n_low, n_high, 1,
4092 fold_convert_loc (loc, arg0_type,
4097 in_p = (in_p != n_in_p);
4102 low = n_low, high = n_high;
4112 /* If EXP is a constant, we can evaluate whether this is true or false. */
4113 if (TREE_CODE (exp) == INTEGER_CST)
4115 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4117 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4123 *pin_p = in_p, *plow = low, *phigh = high;
4127 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4128 type, TYPE, return an expression to test if EXP is in (or out of, depending
4129 on IN_P) the range. Return 0 if the test couldn't be created. */
4132 build_range_check (location_t loc, tree type, tree exp, int in_p,
4133 tree low, tree high)
4135 tree etype = TREE_TYPE (exp), value;
4137 #ifdef HAVE_canonicalize_funcptr_for_compare
4138 /* Disable this optimization for function pointer expressions
4139 on targets that require function pointer canonicalization. */
4140 if (HAVE_canonicalize_funcptr_for_compare
4141 && TREE_CODE (etype) == POINTER_TYPE
4142 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4148 value = build_range_check (loc, type, exp, 1, low, high);
4150 return invert_truthvalue_loc (loc, value);
4155 if (low == 0 && high == 0)
4156 return build_int_cst (type, 1);
4159 return fold_build2_loc (loc, LE_EXPR, type, exp,
4160 fold_convert_loc (loc, etype, high));
4163 return fold_build2_loc (loc, GE_EXPR, type, exp,
4164 fold_convert_loc (loc, etype, low));
4166 if (operand_equal_p (low, high, 0))
4167 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4168 fold_convert_loc (loc, etype, low));
4170 if (integer_zerop (low))
4172 if (! TYPE_UNSIGNED (etype))
4174 etype = unsigned_type_for (etype);
4175 high = fold_convert_loc (loc, etype, high);
4176 exp = fold_convert_loc (loc, etype, exp);
4178 return build_range_check (loc, type, exp, 1, 0, high);
4181 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4182 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4184 unsigned HOST_WIDE_INT lo;
4188 prec = TYPE_PRECISION (etype);
4189 if (prec <= HOST_BITS_PER_WIDE_INT)
4192 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4196 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4197 lo = (unsigned HOST_WIDE_INT) -1;
4200 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4202 if (TYPE_UNSIGNED (etype))
4204 tree signed_etype = signed_type_for (etype);
4205 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4207 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4209 etype = signed_etype;
4210 exp = fold_convert_loc (loc, etype, exp);
4212 return fold_build2_loc (loc, GT_EXPR, type, exp,
4213 build_int_cst (etype, 0));
4217 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4218 This requires wrap-around arithmetics for the type of the expression.
4219 First make sure that arithmetics in this type is valid, then make sure
4220 that it wraps around. */
4221 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4222 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4223 TYPE_UNSIGNED (etype));
4225 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4227 tree utype, minv, maxv;
4229 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4230 for the type in question, as we rely on this here. */
4231 utype = unsigned_type_for (etype);
4232 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4233 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4234 integer_one_node, 1);
4235 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4237 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4244 high = fold_convert_loc (loc, etype, high);
4245 low = fold_convert_loc (loc, etype, low);
4246 exp = fold_convert_loc (loc, etype, exp);
4248 value = const_binop (MINUS_EXPR, high, low);
4251 if (POINTER_TYPE_P (etype))
4253 if (value != 0 && !TREE_OVERFLOW (value))
4255 low = fold_convert_loc (loc, sizetype, low);
4256 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4257 return build_range_check (loc, type,
4258 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4260 1, build_int_cst (etype, 0), value);
4265 if (value != 0 && !TREE_OVERFLOW (value))
4266 return build_range_check (loc, type,
4267 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4268 1, build_int_cst (etype, 0), value);
4273 /* Return the predecessor of VAL in its type, handling the infinite case. */
4276 range_predecessor (tree val)
4278 tree type = TREE_TYPE (val);
4280 if (INTEGRAL_TYPE_P (type)
4281 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4284 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4287 /* Return the successor of VAL in its type, handling the infinite case. */
4290 range_successor (tree val)
4292 tree type = TREE_TYPE (val);
4294 if (INTEGRAL_TYPE_P (type)
4295 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4298 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4301 /* Given two ranges, see if we can merge them into one. Return 1 if we
4302 can, 0 if we can't. Set the output range into the specified parameters. */
4305 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4306 tree high0, int in1_p, tree low1, tree high1)
4314 int lowequal = ((low0 == 0 && low1 == 0)
4315 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4316 low0, 0, low1, 0)));
4317 int highequal = ((high0 == 0 && high1 == 0)
4318 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4319 high0, 1, high1, 1)));
4321 /* Make range 0 be the range that starts first, or ends last if they
4322 start at the same value. Swap them if it isn't. */
4323 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4326 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4327 high1, 1, high0, 1))))
4329 temp = in0_p, in0_p = in1_p, in1_p = temp;
4330 tem = low0, low0 = low1, low1 = tem;
4331 tem = high0, high0 = high1, high1 = tem;
4334 /* Now flag two cases, whether the ranges are disjoint or whether the
4335 second range is totally subsumed in the first. Note that the tests
4336 below are simplified by the ones above. */
4337 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4338 high0, 1, low1, 0));
4339 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4340 high1, 1, high0, 1));
4342 /* We now have four cases, depending on whether we are including or
4343 excluding the two ranges. */
4346 /* If they don't overlap, the result is false. If the second range
4347 is a subset it is the result. Otherwise, the range is from the start
4348 of the second to the end of the first. */
4350 in_p = 0, low = high = 0;
4352 in_p = 1, low = low1, high = high1;
4354 in_p = 1, low = low1, high = high0;
4357 else if (in0_p && ! in1_p)
4359 /* If they don't overlap, the result is the first range. If they are
4360 equal, the result is false. If the second range is a subset of the
4361 first, and the ranges begin at the same place, we go from just after
4362 the end of the second range to the end of the first. If the second
4363 range is not a subset of the first, or if it is a subset and both
4364 ranges end at the same place, the range starts at the start of the
4365 first range and ends just before the second range.
4366 Otherwise, we can't describe this as a single range. */
4368 in_p = 1, low = low0, high = high0;
4369 else if (lowequal && highequal)
4370 in_p = 0, low = high = 0;
4371 else if (subset && lowequal)
4373 low = range_successor (high1);
4378 /* We are in the weird situation where high0 > high1 but
4379 high1 has no successor. Punt. */
4383 else if (! subset || highequal)
4386 high = range_predecessor (low1);
4390 /* low0 < low1 but low1 has no predecessor. Punt. */
4398 else if (! in0_p && in1_p)
4400 /* If they don't overlap, the result is the second range. If the second
4401 is a subset of the first, the result is false. Otherwise,
4402 the range starts just after the first range and ends at the
4403 end of the second. */
4405 in_p = 1, low = low1, high = high1;
4406 else if (subset || highequal)
4407 in_p = 0, low = high = 0;
4410 low = range_successor (high0);
4415 /* high1 > high0 but high0 has no successor. Punt. */
4423 /* The case where we are excluding both ranges. Here the complex case
4424 is if they don't overlap. In that case, the only time we have a
4425 range is if they are adjacent. If the second is a subset of the
4426 first, the result is the first. Otherwise, the range to exclude
4427 starts at the beginning of the first range and ends at the end of the
4431 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4432 range_successor (high0),
4434 in_p = 0, low = low0, high = high1;
4437 /* Canonicalize - [min, x] into - [-, x]. */
4438 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4439 switch (TREE_CODE (TREE_TYPE (low0)))
4442 if (TYPE_PRECISION (TREE_TYPE (low0))
4443 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4447 if (tree_int_cst_equal (low0,
4448 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4452 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4453 && integer_zerop (low0))
4460 /* Canonicalize - [x, max] into - [x, -]. */
4461 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4462 switch (TREE_CODE (TREE_TYPE (high1)))
4465 if (TYPE_PRECISION (TREE_TYPE (high1))
4466 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4470 if (tree_int_cst_equal (high1,
4471 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4475 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4476 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4478 integer_one_node, 1)))
4485 /* The ranges might be also adjacent between the maximum and
4486 minimum values of the given type. For
4487 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4488 return + [x + 1, y - 1]. */
4489 if (low0 == 0 && high1 == 0)
4491 low = range_successor (high0);
4492 high = range_predecessor (low1);
4493 if (low == 0 || high == 0)
4503 in_p = 0, low = low0, high = high0;
4505 in_p = 0, low = low0, high = high1;
4508 *pin_p = in_p, *plow = low, *phigh = high;
4513 /* Subroutine of fold, looking inside expressions of the form
4514 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4515 of the COND_EXPR. This function is being used also to optimize
4516 A op B ? C : A, by reversing the comparison first.
4518 Return a folded expression whose code is not a COND_EXPR
4519 anymore, or NULL_TREE if no folding opportunity is found. */
4522 fold_cond_expr_with_comparison (location_t loc, tree type,
4523 tree arg0, tree arg1, tree arg2)
4525 enum tree_code comp_code = TREE_CODE (arg0);
4526 tree arg00 = TREE_OPERAND (arg0, 0);
4527 tree arg01 = TREE_OPERAND (arg0, 1);
4528 tree arg1_type = TREE_TYPE (arg1);
4534 /* If we have A op 0 ? A : -A, consider applying the following
4537 A == 0? A : -A same as -A
4538 A != 0? A : -A same as A
4539 A >= 0? A : -A same as abs (A)
4540 A > 0? A : -A same as abs (A)
4541 A <= 0? A : -A same as -abs (A)
4542 A < 0? A : -A same as -abs (A)
4544 None of these transformations work for modes with signed
4545 zeros. If A is +/-0, the first two transformations will
4546 change the sign of the result (from +0 to -0, or vice
4547 versa). The last four will fix the sign of the result,
4548 even though the original expressions could be positive or
4549 negative, depending on the sign of A.
4551 Note that all these transformations are correct if A is
4552 NaN, since the two alternatives (A and -A) are also NaNs. */
4553 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4554 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4555 ? real_zerop (arg01)
4556 : integer_zerop (arg01))
4557 && ((TREE_CODE (arg2) == NEGATE_EXPR
4558 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4559 /* In the case that A is of the form X-Y, '-A' (arg2) may
4560 have already been folded to Y-X, check for that. */
4561 || (TREE_CODE (arg1) == MINUS_EXPR
4562 && TREE_CODE (arg2) == MINUS_EXPR
4563 && operand_equal_p (TREE_OPERAND (arg1, 0),
4564 TREE_OPERAND (arg2, 1), 0)
4565 && operand_equal_p (TREE_OPERAND (arg1, 1),
4566 TREE_OPERAND (arg2, 0), 0))))
4571 tem = fold_convert_loc (loc, arg1_type, arg1);
4572 return pedantic_non_lvalue_loc (loc,
4573 fold_convert_loc (loc, type,
4574 negate_expr (tem)));
4577 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4580 if (flag_trapping_math)
4585 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4586 arg1 = fold_convert_loc (loc, signed_type_for
4587 (TREE_TYPE (arg1)), arg1);
4588 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4589 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4592 if (flag_trapping_math)
4596 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4597 arg1 = fold_convert_loc (loc, signed_type_for
4598 (TREE_TYPE (arg1)), arg1);
4599 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4600 return negate_expr (fold_convert_loc (loc, type, tem));
4602 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4606 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4607 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4608 both transformations are correct when A is NaN: A != 0
4609 is then true, and A == 0 is false. */
4611 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4612 && integer_zerop (arg01) && integer_zerop (arg2))
4614 if (comp_code == NE_EXPR)
4615 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4616 else if (comp_code == EQ_EXPR)
4617 return build_int_cst (type, 0);
4620 /* Try some transformations of A op B ? A : B.
4622 A == B? A : B same as B
4623 A != B? A : B same as A
4624 A >= B? A : B same as max (A, B)
4625 A > B? A : B same as max (B, A)
4626 A <= B? A : B same as min (A, B)
4627 A < B? A : B same as min (B, A)
4629 As above, these transformations don't work in the presence
4630 of signed zeros. For example, if A and B are zeros of
4631 opposite sign, the first two transformations will change
4632 the sign of the result. In the last four, the original
4633 expressions give different results for (A=+0, B=-0) and
4634 (A=-0, B=+0), but the transformed expressions do not.
4636 The first two transformations are correct if either A or B
4637 is a NaN. In the first transformation, the condition will
4638 be false, and B will indeed be chosen. In the case of the
4639 second transformation, the condition A != B will be true,
4640 and A will be chosen.
4642 The conversions to max() and min() are not correct if B is
4643 a number and A is not. The conditions in the original
4644 expressions will be false, so all four give B. The min()
4645 and max() versions would give a NaN instead. */
4646 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4647 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4648 /* Avoid these transformations if the COND_EXPR may be used
4649 as an lvalue in the C++ front-end. PR c++/19199. */
4651 || (strcmp (lang_hooks.name, "GNU C++") != 0
4652 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4653 || ! maybe_lvalue_p (arg1)
4654 || ! maybe_lvalue_p (arg2)))
4656 tree comp_op0 = arg00;
4657 tree comp_op1 = arg01;
4658 tree comp_type = TREE_TYPE (comp_op0);
4660 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4661 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4671 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4673 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4678 /* In C++ a ?: expression can be an lvalue, so put the
4679 operand which will be used if they are equal first
4680 so that we can convert this back to the
4681 corresponding COND_EXPR. */
4682 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4684 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4685 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4686 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4687 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4688 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4689 comp_op1, comp_op0);
4690 return pedantic_non_lvalue_loc (loc,
4691 fold_convert_loc (loc, type, tem));
4698 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4700 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4701 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4702 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4703 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4704 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4705 comp_op1, comp_op0);
4706 return pedantic_non_lvalue_loc (loc,
4707 fold_convert_loc (loc, type, tem));
4711 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4712 return pedantic_non_lvalue_loc (loc,
4713 fold_convert_loc (loc, type, arg2));
4716 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4717 return pedantic_non_lvalue_loc (loc,
4718 fold_convert_loc (loc, type, arg1));
4721 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4726 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4727 we might still be able to simplify this. For example,
4728 if C1 is one less or one more than C2, this might have started
4729 out as a MIN or MAX and been transformed by this function.
4730 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4732 if (INTEGRAL_TYPE_P (type)
4733 && TREE_CODE (arg01) == INTEGER_CST
4734 && TREE_CODE (arg2) == INTEGER_CST)
4738 if (TREE_CODE (arg1) == INTEGER_CST)
4740 /* We can replace A with C1 in this case. */
4741 arg1 = fold_convert_loc (loc, type, arg01);
4742 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4745 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4746 MIN_EXPR, to preserve the signedness of the comparison. */
4747 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4749 && operand_equal_p (arg01,
4750 const_binop (PLUS_EXPR, arg2,
4751 build_int_cst (type, 1)),
4754 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4755 fold_convert_loc (loc, TREE_TYPE (arg00),
4757 return pedantic_non_lvalue_loc (loc,
4758 fold_convert_loc (loc, type, tem));
4763 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4765 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4767 && operand_equal_p (arg01,
4768 const_binop (MINUS_EXPR, arg2,
4769 build_int_cst (type, 1)),
4772 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4773 fold_convert_loc (loc, TREE_TYPE (arg00),
4775 return pedantic_non_lvalue_loc (loc,
4776 fold_convert_loc (loc, type, tem));
4781 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4782 MAX_EXPR, to preserve the signedness of the comparison. */
4783 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4785 && operand_equal_p (arg01,
4786 const_binop (MINUS_EXPR, arg2,
4787 build_int_cst (type, 1)),
4790 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4791 fold_convert_loc (loc, TREE_TYPE (arg00),
4793 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4798 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4799 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4801 && operand_equal_p (arg01,
4802 const_binop (PLUS_EXPR, arg2,
4803 build_int_cst (type, 1)),
4806 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4807 fold_convert_loc (loc, TREE_TYPE (arg00),
4809 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4823 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4824 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4825 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4829 /* EXP is some logical combination of boolean tests. See if we can
4830 merge it into some range test. Return the new tree if so. */
4833 fold_range_test (location_t loc, enum tree_code code, tree type,
4836 int or_op = (code == TRUTH_ORIF_EXPR
4837 || code == TRUTH_OR_EXPR);
4838 int in0_p, in1_p, in_p;
4839 tree low0, low1, low, high0, high1, high;
4840 bool strict_overflow_p = false;
4841 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4842 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4844 const char * const warnmsg = G_("assuming signed overflow does not occur "
4845 "when simplifying range test");
4847 /* If this is an OR operation, invert both sides; we will invert
4848 again at the end. */
4850 in0_p = ! in0_p, in1_p = ! in1_p;
4852 /* If both expressions are the same, if we can merge the ranges, and we
4853 can build the range test, return it or it inverted. If one of the
4854 ranges is always true or always false, consider it to be the same
4855 expression as the other. */
4856 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4857 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4859 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
4861 : rhs != 0 ? rhs : integer_zero_node,
4864 if (strict_overflow_p)
4865 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4866 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4869 /* On machines where the branch cost is expensive, if this is a
4870 short-circuited branch and the underlying object on both sides
4871 is the same, make a non-short-circuit operation. */
4872 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4873 && lhs != 0 && rhs != 0
4874 && (code == TRUTH_ANDIF_EXPR
4875 || code == TRUTH_ORIF_EXPR)
4876 && operand_equal_p (lhs, rhs, 0))
4878 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4879 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4880 which cases we can't do this. */
4881 if (simple_operand_p (lhs))
4882 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4883 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4886 else if (lang_hooks.decls.global_bindings_p () == 0
4887 && ! CONTAINS_PLACEHOLDER_P (lhs))
4889 tree common = save_expr (lhs);
4891 if (0 != (lhs = build_range_check (loc, type, common,
4892 or_op ? ! in0_p : in0_p,
4894 && (0 != (rhs = build_range_check (loc, type, common,
4895 or_op ? ! in1_p : in1_p,
4898 if (strict_overflow_p)
4899 fold_overflow_warning (warnmsg,
4900 WARN_STRICT_OVERFLOW_COMPARISON);
4901 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4902 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4911 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4912 bit value. Arrange things so the extra bits will be set to zero if and
4913 only if C is signed-extended to its full width. If MASK is nonzero,
4914 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4917 unextend (tree c, int p, int unsignedp, tree mask)
4919 tree type = TREE_TYPE (c);
4920 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4923 if (p == modesize || unsignedp)
4926 /* We work by getting just the sign bit into the low-order bit, then
4927 into the high-order bit, then sign-extend. We then XOR that value
4929 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4930 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4932 /* We must use a signed type in order to get an arithmetic right shift.
4933 However, we must also avoid introducing accidental overflows, so that
4934 a subsequent call to integer_zerop will work. Hence we must
4935 do the type conversion here. At this point, the constant is either
4936 zero or one, and the conversion to a signed type can never overflow.
4937 We could get an overflow if this conversion is done anywhere else. */
4938 if (TYPE_UNSIGNED (type))
4939 temp = fold_convert (signed_type_for (type), temp);
4941 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4942 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4944 temp = const_binop (BIT_AND_EXPR, temp,
4945 fold_convert (TREE_TYPE (c), mask));
4946 /* If necessary, convert the type back to match the type of C. */
4947 if (TYPE_UNSIGNED (type))
4948 temp = fold_convert (type, temp);
4950 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4953 /* For an expression that has the form
4957 we can drop one of the inner expressions and simplify to
4961 LOC is the location of the resulting expression. OP is the inner
4962 logical operation; the left-hand side in the examples above, while CMPOP
4963 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4964 removing a condition that guards another, as in
4965 (A != NULL && A->...) || A == NULL
4966 which we must not transform. If RHS_ONLY is true, only eliminate the
4967 right-most operand of the inner logical operation. */
4970 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4973 tree type = TREE_TYPE (cmpop);
4974 enum tree_code code = TREE_CODE (cmpop);
4975 enum tree_code truthop_code = TREE_CODE (op);
4976 tree lhs = TREE_OPERAND (op, 0);
4977 tree rhs = TREE_OPERAND (op, 1);
4978 tree orig_lhs = lhs, orig_rhs = rhs;
4979 enum tree_code rhs_code = TREE_CODE (rhs);
4980 enum tree_code lhs_code = TREE_CODE (lhs);
4981 enum tree_code inv_code;
4983 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4986 if (TREE_CODE_CLASS (code) != tcc_comparison)
4989 if (rhs_code == truthop_code)
4991 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4992 if (newrhs != NULL_TREE)
4995 rhs_code = TREE_CODE (rhs);
4998 if (lhs_code == truthop_code && !rhs_only)
5000 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5001 if (newlhs != NULL_TREE)
5004 lhs_code = TREE_CODE (lhs);
5008 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5009 if (inv_code == rhs_code
5010 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5011 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5013 if (!rhs_only && inv_code == lhs_code
5014 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5015 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5017 if (rhs != orig_rhs || lhs != orig_lhs)
5018 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5023 /* Find ways of folding logical expressions of LHS and RHS:
5024 Try to merge two comparisons to the same innermost item.
5025 Look for range tests like "ch >= '0' && ch <= '9'".
5026 Look for combinations of simple terms on machines with expensive branches
5027 and evaluate the RHS unconditionally.
5029 For example, if we have p->a == 2 && p->b == 4 and we can make an
5030 object large enough to span both A and B, we can do this with a comparison
5031 against the object ANDed with the a mask.
5033 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5034 operations to do this with one comparison.
5036 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5037 function and the one above.
5039 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5040 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5042 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5045 We return the simplified tree or 0 if no optimization is possible. */
5048 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5051 /* If this is the "or" of two comparisons, we can do something if
5052 the comparisons are NE_EXPR. If this is the "and", we can do something
5053 if the comparisons are EQ_EXPR. I.e.,
5054 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5056 WANTED_CODE is this operation code. For single bit fields, we can
5057 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5058 comparison for one-bit fields. */
5060 enum tree_code wanted_code;
5061 enum tree_code lcode, rcode;
5062 tree ll_arg, lr_arg, rl_arg, rr_arg;
5063 tree ll_inner, lr_inner, rl_inner, rr_inner;
5064 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5065 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5066 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5067 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5068 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5069 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5070 enum machine_mode lnmode, rnmode;
5071 tree ll_mask, lr_mask, rl_mask, rr_mask;
5072 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5073 tree l_const, r_const;
5074 tree lntype, rntype, result;
5075 HOST_WIDE_INT first_bit, end_bit;
5077 tree orig_lhs = lhs, orig_rhs = rhs;
5078 enum tree_code orig_code = code;
5080 /* Start by getting the comparison codes. Fail if anything is volatile.
5081 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5082 it were surrounded with a NE_EXPR. */
5084 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5087 lcode = TREE_CODE (lhs);
5088 rcode = TREE_CODE (rhs);
5090 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5092 lhs = build2 (NE_EXPR, truth_type, lhs,
5093 build_int_cst (TREE_TYPE (lhs), 0));
5097 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5099 rhs = build2 (NE_EXPR, truth_type, rhs,
5100 build_int_cst (TREE_TYPE (rhs), 0));
5104 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5105 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5108 ll_arg = TREE_OPERAND (lhs, 0);
5109 lr_arg = TREE_OPERAND (lhs, 1);
5110 rl_arg = TREE_OPERAND (rhs, 0);
5111 rr_arg = TREE_OPERAND (rhs, 1);
5113 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5114 if (simple_operand_p (ll_arg)
5115 && simple_operand_p (lr_arg))
5117 if (operand_equal_p (ll_arg, rl_arg, 0)
5118 && operand_equal_p (lr_arg, rr_arg, 0))
5120 result = combine_comparisons (loc, code, lcode, rcode,
5121 truth_type, ll_arg, lr_arg);
5125 else if (operand_equal_p (ll_arg, rr_arg, 0)
5126 && operand_equal_p (lr_arg, rl_arg, 0))
5128 result = combine_comparisons (loc, code, lcode,
5129 swap_tree_comparison (rcode),
5130 truth_type, ll_arg, lr_arg);
5136 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5137 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5139 /* If the RHS can be evaluated unconditionally and its operands are
5140 simple, it wins to evaluate the RHS unconditionally on machines
5141 with expensive branches. In this case, this isn't a comparison
5142 that can be merged. Avoid doing this if the RHS is a floating-point
5143 comparison since those can trap. */
5145 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5147 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5148 && simple_operand_p (rl_arg)
5149 && simple_operand_p (rr_arg))
5151 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5152 if (code == TRUTH_OR_EXPR
5153 && lcode == NE_EXPR && integer_zerop (lr_arg)
5154 && rcode == NE_EXPR && integer_zerop (rr_arg)
5155 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5156 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5157 return build2_loc (loc, NE_EXPR, truth_type,
5158 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5160 build_int_cst (TREE_TYPE (ll_arg), 0));
5162 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5163 if (code == TRUTH_AND_EXPR
5164 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5165 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5166 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5167 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5168 return build2_loc (loc, EQ_EXPR, truth_type,
5169 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5171 build_int_cst (TREE_TYPE (ll_arg), 0));
5173 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5175 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5176 return build2_loc (loc, code, truth_type, lhs, rhs);
5181 /* See if the comparisons can be merged. Then get all the parameters for
5184 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5185 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5189 ll_inner = decode_field_reference (loc, ll_arg,
5190 &ll_bitsize, &ll_bitpos, &ll_mode,
5191 &ll_unsignedp, &volatilep, &ll_mask,
5193 lr_inner = decode_field_reference (loc, lr_arg,
5194 &lr_bitsize, &lr_bitpos, &lr_mode,
5195 &lr_unsignedp, &volatilep, &lr_mask,
5197 rl_inner = decode_field_reference (loc, rl_arg,
5198 &rl_bitsize, &rl_bitpos, &rl_mode,
5199 &rl_unsignedp, &volatilep, &rl_mask,
5201 rr_inner = decode_field_reference (loc, rr_arg,
5202 &rr_bitsize, &rr_bitpos, &rr_mode,
5203 &rr_unsignedp, &volatilep, &rr_mask,
5206 /* It must be true that the inner operation on the lhs of each
5207 comparison must be the same if we are to be able to do anything.
5208 Then see if we have constants. If not, the same must be true for
5210 if (volatilep || ll_inner == 0 || rl_inner == 0
5211 || ! operand_equal_p (ll_inner, rl_inner, 0))
5214 if (TREE_CODE (lr_arg) == INTEGER_CST
5215 && TREE_CODE (rr_arg) == INTEGER_CST)
5216 l_const = lr_arg, r_const = rr_arg;
5217 else if (lr_inner == 0 || rr_inner == 0
5218 || ! operand_equal_p (lr_inner, rr_inner, 0))
5221 l_const = r_const = 0;
5223 /* If either comparison code is not correct for our logical operation,
5224 fail. However, we can convert a one-bit comparison against zero into
5225 the opposite comparison against that bit being set in the field. */
5227 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5228 if (lcode != wanted_code)
5230 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5232 /* Make the left operand unsigned, since we are only interested
5233 in the value of one bit. Otherwise we are doing the wrong
5242 /* This is analogous to the code for l_const above. */
5243 if (rcode != wanted_code)
5245 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5254 /* See if we can find a mode that contains both fields being compared on
5255 the left. If we can't, fail. Otherwise, update all constants and masks
5256 to be relative to a field of that size. */
5257 first_bit = MIN (ll_bitpos, rl_bitpos);
5258 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5259 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5260 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5262 if (lnmode == VOIDmode)
5265 lnbitsize = GET_MODE_BITSIZE (lnmode);
5266 lnbitpos = first_bit & ~ (lnbitsize - 1);
5267 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5268 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5270 if (BYTES_BIG_ENDIAN)
5272 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5273 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5276 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5277 size_int (xll_bitpos));
5278 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5279 size_int (xrl_bitpos));
5283 l_const = fold_convert_loc (loc, lntype, l_const);
5284 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5285 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5286 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5287 fold_build1_loc (loc, BIT_NOT_EXPR,
5290 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5292 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5297 r_const = fold_convert_loc (loc, lntype, r_const);
5298 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5299 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5300 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5301 fold_build1_loc (loc, BIT_NOT_EXPR,
5304 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5306 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5310 /* If the right sides are not constant, do the same for it. Also,
5311 disallow this optimization if a size or signedness mismatch occurs
5312 between the left and right sides. */
5315 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5316 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5317 /* Make sure the two fields on the right
5318 correspond to the left without being swapped. */
5319 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5322 first_bit = MIN (lr_bitpos, rr_bitpos);
5323 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5324 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5325 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5327 if (rnmode == VOIDmode)
5330 rnbitsize = GET_MODE_BITSIZE (rnmode);
5331 rnbitpos = first_bit & ~ (rnbitsize - 1);
5332 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5333 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5335 if (BYTES_BIG_ENDIAN)
5337 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5338 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5341 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5343 size_int (xlr_bitpos));
5344 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5346 size_int (xrr_bitpos));
5348 /* Make a mask that corresponds to both fields being compared.
5349 Do this for both items being compared. If the operands are the
5350 same size and the bits being compared are in the same position
5351 then we can do this by masking both and comparing the masked
5353 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5354 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5355 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5357 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5358 ll_unsignedp || rl_unsignedp);
5359 if (! all_ones_mask_p (ll_mask, lnbitsize))
5360 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5362 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5363 lr_unsignedp || rr_unsignedp);
5364 if (! all_ones_mask_p (lr_mask, rnbitsize))
5365 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5367 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5370 /* There is still another way we can do something: If both pairs of
5371 fields being compared are adjacent, we may be able to make a wider
5372 field containing them both.
5374 Note that we still must mask the lhs/rhs expressions. Furthermore,
5375 the mask must be shifted to account for the shift done by
5376 make_bit_field_ref. */
5377 if ((ll_bitsize + ll_bitpos == rl_bitpos
5378 && lr_bitsize + lr_bitpos == rr_bitpos)
5379 || (ll_bitpos == rl_bitpos + rl_bitsize
5380 && lr_bitpos == rr_bitpos + rr_bitsize))
5384 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5385 ll_bitsize + rl_bitsize,
5386 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5387 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5388 lr_bitsize + rr_bitsize,
5389 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5391 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5392 size_int (MIN (xll_bitpos, xrl_bitpos)));
5393 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5394 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5396 /* Convert to the smaller type before masking out unwanted bits. */
5398 if (lntype != rntype)
5400 if (lnbitsize > rnbitsize)
5402 lhs = fold_convert_loc (loc, rntype, lhs);
5403 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5406 else if (lnbitsize < rnbitsize)
5408 rhs = fold_convert_loc (loc, lntype, rhs);
5409 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5414 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5415 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5417 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5418 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5420 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5426 /* Handle the case of comparisons with constants. If there is something in
5427 common between the masks, those bits of the constants must be the same.
5428 If not, the condition is always false. Test for this to avoid generating
5429 incorrect code below. */
5430 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5431 if (! integer_zerop (result)
5432 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5433 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5435 if (wanted_code == NE_EXPR)
5437 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5438 return constant_boolean_node (true, truth_type);
5442 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5443 return constant_boolean_node (false, truth_type);
5447 /* Construct the expression we will return. First get the component
5448 reference we will make. Unless the mask is all ones the width of
5449 that field, perform the mask operation. Then compare with the
5451 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5452 ll_unsignedp || rl_unsignedp);
5454 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5455 if (! all_ones_mask_p (ll_mask, lnbitsize))
5456 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5458 return build2_loc (loc, wanted_code, truth_type, result,
5459 const_binop (BIT_IOR_EXPR, l_const, r_const));
5462 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5466 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5470 enum tree_code op_code;
5473 int consts_equal, consts_lt;
5476 STRIP_SIGN_NOPS (arg0);
5478 op_code = TREE_CODE (arg0);
5479 minmax_const = TREE_OPERAND (arg0, 1);
5480 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5481 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5482 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5483 inner = TREE_OPERAND (arg0, 0);
5485 /* If something does not permit us to optimize, return the original tree. */
5486 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5487 || TREE_CODE (comp_const) != INTEGER_CST
5488 || TREE_OVERFLOW (comp_const)
5489 || TREE_CODE (minmax_const) != INTEGER_CST
5490 || TREE_OVERFLOW (minmax_const))
5493 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5494 and GT_EXPR, doing the rest with recursive calls using logical
5498 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5501 = optimize_minmax_comparison (loc,
5502 invert_tree_comparison (code, false),
5505 return invert_truthvalue_loc (loc, tem);
5511 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5512 optimize_minmax_comparison
5513 (loc, EQ_EXPR, type, arg0, comp_const),
5514 optimize_minmax_comparison
5515 (loc, GT_EXPR, type, arg0, comp_const));
5518 if (op_code == MAX_EXPR && consts_equal)
5519 /* MAX (X, 0) == 0 -> X <= 0 */
5520 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5522 else if (op_code == MAX_EXPR && consts_lt)
5523 /* MAX (X, 0) == 5 -> X == 5 */
5524 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5526 else if (op_code == MAX_EXPR)
5527 /* MAX (X, 0) == -1 -> false */
5528 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5530 else if (consts_equal)
5531 /* MIN (X, 0) == 0 -> X >= 0 */
5532 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5535 /* MIN (X, 0) == 5 -> false */
5536 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5539 /* MIN (X, 0) == -1 -> X == -1 */
5540 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5543 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5544 /* MAX (X, 0) > 0 -> X > 0
5545 MAX (X, 0) > 5 -> X > 5 */
5546 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5548 else if (op_code == MAX_EXPR)
5549 /* MAX (X, 0) > -1 -> true */
5550 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5552 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5553 /* MIN (X, 0) > 0 -> false
5554 MIN (X, 0) > 5 -> false */
5555 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5558 /* MIN (X, 0) > -1 -> X > -1 */
5559 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5566 /* T is an integer expression that is being multiplied, divided, or taken a
5567 modulus (CODE says which and what kind of divide or modulus) by a
5568 constant C. See if we can eliminate that operation by folding it with
5569 other operations already in T. WIDE_TYPE, if non-null, is a type that
5570 should be used for the computation if wider than our type.
5572 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5573 (X * 2) + (Y * 4). We must, however, be assured that either the original
5574 expression would not overflow or that overflow is undefined for the type
5575 in the language in question.
5577 If we return a non-null expression, it is an equivalent form of the
5578 original computation, but need not be in the original type.
5580 We set *STRICT_OVERFLOW_P to true if the return values depends on
5581 signed overflow being undefined. Otherwise we do not change
5582 *STRICT_OVERFLOW_P. */
5585 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5586 bool *strict_overflow_p)
5588 /* To avoid exponential search depth, refuse to allow recursion past
5589 three levels. Beyond that (1) it's highly unlikely that we'll find
5590 something interesting and (2) we've probably processed it before
5591 when we built the inner expression. */
5600 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5607 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5608 bool *strict_overflow_p)
5610 tree type = TREE_TYPE (t);
5611 enum tree_code tcode = TREE_CODE (t);
5612 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5613 > GET_MODE_SIZE (TYPE_MODE (type)))
5614 ? wide_type : type);
5616 int same_p = tcode == code;
5617 tree op0 = NULL_TREE, op1 = NULL_TREE;
5618 bool sub_strict_overflow_p;
5620 /* Don't deal with constants of zero here; they confuse the code below. */
5621 if (integer_zerop (c))
5624 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5625 op0 = TREE_OPERAND (t, 0);
5627 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5628 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5630 /* Note that we need not handle conditional operations here since fold
5631 already handles those cases. So just do arithmetic here. */
5635 /* For a constant, we can always simplify if we are a multiply
5636 or (for divide and modulus) if it is a multiple of our constant. */
5637 if (code == MULT_EXPR
5638 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5639 return const_binop (code, fold_convert (ctype, t),
5640 fold_convert (ctype, c));
5643 CASE_CONVERT: case NON_LVALUE_EXPR:
5644 /* If op0 is an expression ... */
5645 if ((COMPARISON_CLASS_P (op0)
5646 || UNARY_CLASS_P (op0)
5647 || BINARY_CLASS_P (op0)
5648 || VL_EXP_CLASS_P (op0)
5649 || EXPRESSION_CLASS_P (op0))
5650 /* ... and has wrapping overflow, and its type is smaller
5651 than ctype, then we cannot pass through as widening. */
5652 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5653 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5654 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5655 && (TYPE_PRECISION (ctype)
5656 > TYPE_PRECISION (TREE_TYPE (op0))))
5657 /* ... or this is a truncation (t is narrower than op0),
5658 then we cannot pass through this narrowing. */
5659 || (TYPE_PRECISION (type)
5660 < TYPE_PRECISION (TREE_TYPE (op0)))
5661 /* ... or signedness changes for division or modulus,
5662 then we cannot pass through this conversion. */
5663 || (code != MULT_EXPR
5664 && (TYPE_UNSIGNED (ctype)
5665 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5666 /* ... or has undefined overflow while the converted to
5667 type has not, we cannot do the operation in the inner type
5668 as that would introduce undefined overflow. */
5669 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5670 && !TYPE_OVERFLOW_UNDEFINED (type))))
5673 /* Pass the constant down and see if we can make a simplification. If
5674 we can, replace this expression with the inner simplification for
5675 possible later conversion to our or some other type. */
5676 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5677 && TREE_CODE (t2) == INTEGER_CST
5678 && !TREE_OVERFLOW (t2)
5679 && (0 != (t1 = extract_muldiv (op0, t2, code,
5681 ? ctype : NULL_TREE,
5682 strict_overflow_p))))
5687 /* If widening the type changes it from signed to unsigned, then we
5688 must avoid building ABS_EXPR itself as unsigned. */
5689 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5691 tree cstype = (*signed_type_for) (ctype);
5692 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5695 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5696 return fold_convert (ctype, t1);
5700 /* If the constant is negative, we cannot simplify this. */
5701 if (tree_int_cst_sgn (c) == -1)
5705 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5707 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5710 case MIN_EXPR: case MAX_EXPR:
5711 /* If widening the type changes the signedness, then we can't perform
5712 this optimization as that changes the result. */
5713 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5716 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5717 sub_strict_overflow_p = false;
5718 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5719 &sub_strict_overflow_p)) != 0
5720 && (t2 = extract_muldiv (op1, c, code, wide_type,
5721 &sub_strict_overflow_p)) != 0)
5723 if (tree_int_cst_sgn (c) < 0)
5724 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5725 if (sub_strict_overflow_p)
5726 *strict_overflow_p = true;
5727 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5728 fold_convert (ctype, t2));
5732 case LSHIFT_EXPR: case RSHIFT_EXPR:
5733 /* If the second operand is constant, this is a multiplication
5734 or floor division, by a power of two, so we can treat it that
5735 way unless the multiplier or divisor overflows. Signed
5736 left-shift overflow is implementation-defined rather than
5737 undefined in C90, so do not convert signed left shift into
5739 if (TREE_CODE (op1) == INTEGER_CST
5740 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5741 /* const_binop may not detect overflow correctly,
5742 so check for it explicitly here. */
5743 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5744 && TREE_INT_CST_HIGH (op1) == 0
5745 && 0 != (t1 = fold_convert (ctype,
5746 const_binop (LSHIFT_EXPR,
5749 && !TREE_OVERFLOW (t1))
5750 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5751 ? MULT_EXPR : FLOOR_DIV_EXPR,
5753 fold_convert (ctype, op0),
5755 c, code, wide_type, strict_overflow_p);
5758 case PLUS_EXPR: case MINUS_EXPR:
5759 /* See if we can eliminate the operation on both sides. If we can, we
5760 can return a new PLUS or MINUS. If we can't, the only remaining
5761 cases where we can do anything are if the second operand is a
5763 sub_strict_overflow_p = false;
5764 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5765 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5766 if (t1 != 0 && t2 != 0
5767 && (code == MULT_EXPR
5768 /* If not multiplication, we can only do this if both operands
5769 are divisible by c. */
5770 || (multiple_of_p (ctype, op0, c)
5771 && multiple_of_p (ctype, op1, c))))
5773 if (sub_strict_overflow_p)
5774 *strict_overflow_p = true;
5775 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5776 fold_convert (ctype, t2));
5779 /* If this was a subtraction, negate OP1 and set it to be an addition.
5780 This simplifies the logic below. */
5781 if (tcode == MINUS_EXPR)
5783 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5784 /* If OP1 was not easily negatable, the constant may be OP0. */
5785 if (TREE_CODE (op0) == INTEGER_CST)
5796 if (TREE_CODE (op1) != INTEGER_CST)
5799 /* If either OP1 or C are negative, this optimization is not safe for
5800 some of the division and remainder types while for others we need
5801 to change the code. */
5802 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5804 if (code == CEIL_DIV_EXPR)
5805 code = FLOOR_DIV_EXPR;
5806 else if (code == FLOOR_DIV_EXPR)
5807 code = CEIL_DIV_EXPR;
5808 else if (code != MULT_EXPR
5809 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5813 /* If it's a multiply or a division/modulus operation of a multiple
5814 of our constant, do the operation and verify it doesn't overflow. */
5815 if (code == MULT_EXPR
5816 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5818 op1 = const_binop (code, fold_convert (ctype, op1),
5819 fold_convert (ctype, c));
5820 /* We allow the constant to overflow with wrapping semantics. */
5822 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5828 /* If we have an unsigned type is not a sizetype, we cannot widen
5829 the operation since it will change the result if the original
5830 computation overflowed. */
5831 if (TYPE_UNSIGNED (ctype)
5832 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5836 /* If we were able to eliminate our operation from the first side,
5837 apply our operation to the second side and reform the PLUS. */
5838 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5839 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5841 /* The last case is if we are a multiply. In that case, we can
5842 apply the distributive law to commute the multiply and addition
5843 if the multiplication of the constants doesn't overflow. */
5844 if (code == MULT_EXPR)
5845 return fold_build2 (tcode, ctype,
5846 fold_build2 (code, ctype,
5847 fold_convert (ctype, op0),
5848 fold_convert (ctype, c)),
5854 /* We have a special case here if we are doing something like
5855 (C * 8) % 4 since we know that's zero. */
5856 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5857 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5858 /* If the multiplication can overflow we cannot optimize this.
5859 ??? Until we can properly mark individual operations as
5860 not overflowing we need to treat sizetype special here as
5861 stor-layout relies on this opimization to make
5862 DECL_FIELD_BIT_OFFSET always a constant. */
5863 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5864 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5865 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5866 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5867 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5869 *strict_overflow_p = true;
5870 return omit_one_operand (type, integer_zero_node, op0);
5873 /* ... fall through ... */
5875 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5876 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5877 /* If we can extract our operation from the LHS, do so and return a
5878 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5879 do something only if the second operand is a constant. */
5881 && (t1 = extract_muldiv (op0, c, code, wide_type,
5882 strict_overflow_p)) != 0)
5883 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5884 fold_convert (ctype, op1));
5885 else if (tcode == MULT_EXPR && code == MULT_EXPR
5886 && (t1 = extract_muldiv (op1, c, code, wide_type,
5887 strict_overflow_p)) != 0)
5888 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5889 fold_convert (ctype, t1));
5890 else if (TREE_CODE (op1) != INTEGER_CST)
5893 /* If these are the same operation types, we can associate them
5894 assuming no overflow. */
5896 && 0 != (t1 = int_const_binop (MULT_EXPR,
5897 fold_convert (ctype, op1),
5898 fold_convert (ctype, c), 1))
5899 && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1),
5900 (TYPE_UNSIGNED (ctype)
5901 && tcode != MULT_EXPR) ? -1 : 1,
5902 TREE_OVERFLOW (t1)))
5903 && !TREE_OVERFLOW (t1))
5904 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5906 /* If these operations "cancel" each other, we have the main
5907 optimizations of this pass, which occur when either constant is a
5908 multiple of the other, in which case we replace this with either an
5909 operation or CODE or TCODE.
5911 If we have an unsigned type that is not a sizetype, we cannot do
5912 this since it will change the result if the original computation
5914 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5915 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5916 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5917 || (tcode == MULT_EXPR
5918 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5919 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5920 && code != MULT_EXPR)))
5922 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5924 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5925 *strict_overflow_p = true;
5926 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5927 fold_convert (ctype,
5928 const_binop (TRUNC_DIV_EXPR,
5931 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5933 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5934 *strict_overflow_p = true;
5935 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5936 fold_convert (ctype,
5937 const_binop (TRUNC_DIV_EXPR,
5950 /* Return a node which has the indicated constant VALUE (either 0 or
5951 1), and is of the indicated TYPE. */
5954 constant_boolean_node (int value, tree type)
5956 if (type == integer_type_node)
5957 return value ? integer_one_node : integer_zero_node;
5958 else if (type == boolean_type_node)
5959 return value ? boolean_true_node : boolean_false_node;
5961 return build_int_cst_type (type, value);
5963 return build_int_cst (NULL_TREE, value);
5967 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5968 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5969 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5970 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5971 COND is the first argument to CODE; otherwise (as in the example
5972 given here), it is the second argument. TYPE is the type of the
5973 original expression. Return NULL_TREE if no simplification is
5977 fold_binary_op_with_conditional_arg (location_t loc,
5978 enum tree_code code,
5979 tree type, tree op0, tree op1,
5980 tree cond, tree arg, int cond_first_p)
5982 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5983 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5984 tree test, true_value, false_value;
5985 tree lhs = NULL_TREE;
5986 tree rhs = NULL_TREE;
5988 if (TREE_CODE (cond) == COND_EXPR)
5990 test = TREE_OPERAND (cond, 0);
5991 true_value = TREE_OPERAND (cond, 1);
5992 false_value = TREE_OPERAND (cond, 2);
5993 /* If this operand throws an expression, then it does not make
5994 sense to try to perform a logical or arithmetic operation
5996 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5998 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6003 tree testtype = TREE_TYPE (cond);
6005 true_value = constant_boolean_node (true, testtype);
6006 false_value = constant_boolean_node (false, testtype);
6009 /* This transformation is only worthwhile if we don't have to wrap ARG
6010 in a SAVE_EXPR and the operation can be simplified without recursing
6011 on at least one of the branches once its pushed inside the COND_EXPR. */
6012 if (!TREE_CONSTANT (arg)
6013 && (TREE_SIDE_EFFECTS (arg)
6014 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6015 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6018 arg = fold_convert_loc (loc, arg_type, arg);
6021 true_value = fold_convert_loc (loc, cond_type, true_value);
6023 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6025 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6029 false_value = fold_convert_loc (loc, cond_type, false_value);
6031 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6033 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6036 /* Check that we have simplified at least one of the branches. */
6037 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6040 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6044 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6046 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6047 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6048 ADDEND is the same as X.
6050 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6051 and finite. The problematic cases are when X is zero, and its mode
6052 has signed zeros. In the case of rounding towards -infinity,
6053 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6054 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6057 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6059 if (!real_zerop (addend))
6062 /* Don't allow the fold with -fsignaling-nans. */
6063 if (HONOR_SNANS (TYPE_MODE (type)))
6066 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6067 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6070 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6071 if (TREE_CODE (addend) == REAL_CST
6072 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6075 /* The mode has signed zeros, and we have to honor their sign.
6076 In this situation, there is only one case we can return true for.
6077 X - 0 is the same as X unless rounding towards -infinity is
6079 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6082 /* Subroutine of fold() that checks comparisons of built-in math
6083 functions against real constants.
6085 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6086 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6087 is the type of the result and ARG0 and ARG1 are the operands of the
6088 comparison. ARG1 must be a TREE_REAL_CST.
6090 The function returns the constant folded tree if a simplification
6091 can be made, and NULL_TREE otherwise. */
6094 fold_mathfn_compare (location_t loc,
6095 enum built_in_function fcode, enum tree_code code,
6096 tree type, tree arg0, tree arg1)
6100 if (BUILTIN_SQRT_P (fcode))
6102 tree arg = CALL_EXPR_ARG (arg0, 0);
6103 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6105 c = TREE_REAL_CST (arg1);
6106 if (REAL_VALUE_NEGATIVE (c))
6108 /* sqrt(x) < y is always false, if y is negative. */
6109 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6110 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6112 /* sqrt(x) > y is always true, if y is negative and we
6113 don't care about NaNs, i.e. negative values of x. */
6114 if (code == NE_EXPR || !HONOR_NANS (mode))
6115 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6117 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6118 return fold_build2_loc (loc, GE_EXPR, type, arg,
6119 build_real (TREE_TYPE (arg), dconst0));
6121 else if (code == GT_EXPR || code == GE_EXPR)
6125 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6126 real_convert (&c2, mode, &c2);
6128 if (REAL_VALUE_ISINF (c2))
6130 /* sqrt(x) > y is x == +Inf, when y is very large. */
6131 if (HONOR_INFINITIES (mode))
6132 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6133 build_real (TREE_TYPE (arg), c2));
6135 /* sqrt(x) > y is always false, when y is very large
6136 and we don't care about infinities. */
6137 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6140 /* sqrt(x) > c is the same as x > c*c. */
6141 return fold_build2_loc (loc, code, type, arg,
6142 build_real (TREE_TYPE (arg), c2));
6144 else if (code == LT_EXPR || code == LE_EXPR)
6148 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6149 real_convert (&c2, mode, &c2);
6151 if (REAL_VALUE_ISINF (c2))
6153 /* sqrt(x) < y is always true, when y is a very large
6154 value and we don't care about NaNs or Infinities. */
6155 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6156 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6158 /* sqrt(x) < y is x != +Inf when y is very large and we
6159 don't care about NaNs. */
6160 if (! HONOR_NANS (mode))
6161 return fold_build2_loc (loc, NE_EXPR, type, arg,
6162 build_real (TREE_TYPE (arg), c2));
6164 /* sqrt(x) < y is x >= 0 when y is very large and we
6165 don't care about Infinities. */
6166 if (! HONOR_INFINITIES (mode))
6167 return fold_build2_loc (loc, GE_EXPR, type, arg,
6168 build_real (TREE_TYPE (arg), dconst0));
6170 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6171 if (lang_hooks.decls.global_bindings_p () != 0
6172 || CONTAINS_PLACEHOLDER_P (arg))
6175 arg = save_expr (arg);
6176 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6177 fold_build2_loc (loc, GE_EXPR, type, arg,
6178 build_real (TREE_TYPE (arg),
6180 fold_build2_loc (loc, NE_EXPR, type, arg,
6181 build_real (TREE_TYPE (arg),
6185 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6186 if (! HONOR_NANS (mode))
6187 return fold_build2_loc (loc, code, type, arg,
6188 build_real (TREE_TYPE (arg), c2));
6190 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6191 if (lang_hooks.decls.global_bindings_p () == 0
6192 && ! CONTAINS_PLACEHOLDER_P (arg))
6194 arg = save_expr (arg);
6195 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6196 fold_build2_loc (loc, GE_EXPR, type, arg,
6197 build_real (TREE_TYPE (arg),
6199 fold_build2_loc (loc, code, type, arg,
6200 build_real (TREE_TYPE (arg),
6209 /* Subroutine of fold() that optimizes comparisons against Infinities,
6210 either +Inf or -Inf.
6212 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6213 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6214 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6216 The function returns the constant folded tree if a simplification
6217 can be made, and NULL_TREE otherwise. */
6220 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6221 tree arg0, tree arg1)
6223 enum machine_mode mode;
6224 REAL_VALUE_TYPE max;
6228 mode = TYPE_MODE (TREE_TYPE (arg0));
6230 /* For negative infinity swap the sense of the comparison. */
6231 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6233 code = swap_tree_comparison (code);
6238 /* x > +Inf is always false, if with ignore sNANs. */
6239 if (HONOR_SNANS (mode))
6241 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6244 /* x <= +Inf is always true, if we don't case about NaNs. */
6245 if (! HONOR_NANS (mode))
6246 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6248 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6249 if (lang_hooks.decls.global_bindings_p () == 0
6250 && ! CONTAINS_PLACEHOLDER_P (arg0))
6252 arg0 = save_expr (arg0);
6253 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6259 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6260 real_maxval (&max, neg, mode);
6261 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6262 arg0, build_real (TREE_TYPE (arg0), max));
6265 /* x < +Inf is always equal to x <= DBL_MAX. */
6266 real_maxval (&max, neg, mode);
6267 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6268 arg0, build_real (TREE_TYPE (arg0), max));
6271 /* x != +Inf is always equal to !(x > DBL_MAX). */
6272 real_maxval (&max, neg, mode);
6273 if (! HONOR_NANS (mode))
6274 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6275 arg0, build_real (TREE_TYPE (arg0), max));
6277 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6278 arg0, build_real (TREE_TYPE (arg0), max));
6279 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6288 /* Subroutine of fold() that optimizes comparisons of a division by
6289 a nonzero integer constant against an integer constant, i.e.
6292 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6293 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6294 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6296 The function returns the constant folded tree if a simplification
6297 can be made, and NULL_TREE otherwise. */
6300 fold_div_compare (location_t loc,
6301 enum tree_code code, tree type, tree arg0, tree arg1)
6303 tree prod, tmp, hi, lo;
6304 tree arg00 = TREE_OPERAND (arg0, 0);
6305 tree arg01 = TREE_OPERAND (arg0, 1);
6307 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6311 /* We have to do this the hard way to detect unsigned overflow.
6312 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6313 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6314 TREE_INT_CST_HIGH (arg01),
6315 TREE_INT_CST_LOW (arg1),
6316 TREE_INT_CST_HIGH (arg1),
6317 &val.low, &val.high, unsigned_p);
6318 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6319 neg_overflow = false;
6323 tmp = int_const_binop (MINUS_EXPR, arg01,
6324 build_int_cst (TREE_TYPE (arg01), 1), 0);
6327 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6328 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6329 TREE_INT_CST_HIGH (prod),
6330 TREE_INT_CST_LOW (tmp),
6331 TREE_INT_CST_HIGH (tmp),
6332 &val.low, &val.high, unsigned_p);
6333 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6334 -1, overflow | TREE_OVERFLOW (prod));
6336 else if (tree_int_cst_sgn (arg01) >= 0)
6338 tmp = int_const_binop (MINUS_EXPR, arg01,
6339 build_int_cst (TREE_TYPE (arg01), 1), 0);
6340 switch (tree_int_cst_sgn (arg1))
6343 neg_overflow = true;
6344 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6349 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6354 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6364 /* A negative divisor reverses the relational operators. */
6365 code = swap_tree_comparison (code);
6367 tmp = int_const_binop (PLUS_EXPR, arg01,
6368 build_int_cst (TREE_TYPE (arg01), 1), 0);
6369 switch (tree_int_cst_sgn (arg1))
6372 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6377 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6382 neg_overflow = true;
6383 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6395 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6396 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6397 if (TREE_OVERFLOW (hi))
6398 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6399 if (TREE_OVERFLOW (lo))
6400 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6401 return build_range_check (loc, type, arg00, 1, lo, hi);
6404 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6405 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6406 if (TREE_OVERFLOW (hi))
6407 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6408 if (TREE_OVERFLOW (lo))
6409 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6410 return build_range_check (loc, type, arg00, 0, lo, hi);
6413 if (TREE_OVERFLOW (lo))
6415 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6416 return omit_one_operand_loc (loc, type, tmp, arg00);
6418 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6421 if (TREE_OVERFLOW (hi))
6423 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6424 return omit_one_operand_loc (loc, type, tmp, arg00);
6426 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6429 if (TREE_OVERFLOW (hi))
6431 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6432 return omit_one_operand_loc (loc, type, tmp, arg00);
6434 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6437 if (TREE_OVERFLOW (lo))
6439 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6440 return omit_one_operand_loc (loc, type, tmp, arg00);
6442 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6452 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6453 equality/inequality test, then return a simplified form of the test
6454 using a sign testing. Otherwise return NULL. TYPE is the desired
6458 fold_single_bit_test_into_sign_test (location_t loc,
6459 enum tree_code code, tree arg0, tree arg1,
6462 /* If this is testing a single bit, we can optimize the test. */
6463 if ((code == NE_EXPR || code == EQ_EXPR)
6464 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6465 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6467 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6468 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6469 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6471 if (arg00 != NULL_TREE
6472 /* This is only a win if casting to a signed type is cheap,
6473 i.e. when arg00's type is not a partial mode. */
6474 && TYPE_PRECISION (TREE_TYPE (arg00))
6475 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6477 tree stype = signed_type_for (TREE_TYPE (arg00));
6478 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6480 fold_convert_loc (loc, stype, arg00),
6481 build_int_cst (stype, 0));
6488 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6489 equality/inequality test, then return a simplified form of
6490 the test using shifts and logical operations. Otherwise return
6491 NULL. TYPE is the desired result type. */
6494 fold_single_bit_test (location_t loc, enum tree_code code,
6495 tree arg0, tree arg1, tree result_type)
6497 /* If this is testing a single bit, we can optimize the test. */
6498 if ((code == NE_EXPR || code == EQ_EXPR)
6499 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6500 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6502 tree inner = TREE_OPERAND (arg0, 0);
6503 tree type = TREE_TYPE (arg0);
6504 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6505 enum machine_mode operand_mode = TYPE_MODE (type);
6507 tree signed_type, unsigned_type, intermediate_type;
6510 /* First, see if we can fold the single bit test into a sign-bit
6512 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6517 /* Otherwise we have (A & C) != 0 where C is a single bit,
6518 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6519 Similarly for (A & C) == 0. */
6521 /* If INNER is a right shift of a constant and it plus BITNUM does
6522 not overflow, adjust BITNUM and INNER. */
6523 if (TREE_CODE (inner) == RSHIFT_EXPR
6524 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6525 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6526 && bitnum < TYPE_PRECISION (type)
6527 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6528 bitnum - TYPE_PRECISION (type)))
6530 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6531 inner = TREE_OPERAND (inner, 0);
6534 /* If we are going to be able to omit the AND below, we must do our
6535 operations as unsigned. If we must use the AND, we have a choice.
6536 Normally unsigned is faster, but for some machines signed is. */
6537 #ifdef LOAD_EXTEND_OP
6538 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6539 && !flag_syntax_only) ? 0 : 1;
6544 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6545 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6546 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6547 inner = fold_convert_loc (loc, intermediate_type, inner);
6550 inner = build2 (RSHIFT_EXPR, intermediate_type,
6551 inner, size_int (bitnum));
6553 one = build_int_cst (intermediate_type, 1);
6555 if (code == EQ_EXPR)
6556 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6558 /* Put the AND last so it can combine with more things. */
6559 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6561 /* Make sure to return the proper type. */
6562 inner = fold_convert_loc (loc, result_type, inner);
6569 /* Check whether we are allowed to reorder operands arg0 and arg1,
6570 such that the evaluation of arg1 occurs before arg0. */
6573 reorder_operands_p (const_tree arg0, const_tree arg1)
6575 if (! flag_evaluation_order)
6577 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6579 return ! TREE_SIDE_EFFECTS (arg0)
6580 && ! TREE_SIDE_EFFECTS (arg1);
6583 /* Test whether it is preferable two swap two operands, ARG0 and
6584 ARG1, for example because ARG0 is an integer constant and ARG1
6585 isn't. If REORDER is true, only recommend swapping if we can
6586 evaluate the operands in reverse order. */
6589 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6591 STRIP_SIGN_NOPS (arg0);
6592 STRIP_SIGN_NOPS (arg1);
6594 if (TREE_CODE (arg1) == INTEGER_CST)
6596 if (TREE_CODE (arg0) == INTEGER_CST)
6599 if (TREE_CODE (arg1) == REAL_CST)
6601 if (TREE_CODE (arg0) == REAL_CST)
6604 if (TREE_CODE (arg1) == FIXED_CST)
6606 if (TREE_CODE (arg0) == FIXED_CST)
6609 if (TREE_CODE (arg1) == COMPLEX_CST)
6611 if (TREE_CODE (arg0) == COMPLEX_CST)
6614 if (TREE_CONSTANT (arg1))
6616 if (TREE_CONSTANT (arg0))
6619 if (optimize_function_for_size_p (cfun))
6622 if (reorder && flag_evaluation_order
6623 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6626 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6627 for commutative and comparison operators. Ensuring a canonical
6628 form allows the optimizers to find additional redundancies without
6629 having to explicitly check for both orderings. */
6630 if (TREE_CODE (arg0) == SSA_NAME
6631 && TREE_CODE (arg1) == SSA_NAME
6632 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6635 /* Put SSA_NAMEs last. */
6636 if (TREE_CODE (arg1) == SSA_NAME)
6638 if (TREE_CODE (arg0) == SSA_NAME)
6641 /* Put variables last. */
6650 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6651 ARG0 is extended to a wider type. */
6654 fold_widened_comparison (location_t loc, enum tree_code code,
6655 tree type, tree arg0, tree arg1)
6657 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6659 tree shorter_type, outer_type;
6663 if (arg0_unw == arg0)
6665 shorter_type = TREE_TYPE (arg0_unw);
6667 #ifdef HAVE_canonicalize_funcptr_for_compare
6668 /* Disable this optimization if we're casting a function pointer
6669 type on targets that require function pointer canonicalization. */
6670 if (HAVE_canonicalize_funcptr_for_compare
6671 && TREE_CODE (shorter_type) == POINTER_TYPE
6672 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6676 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6679 arg1_unw = get_unwidened (arg1, NULL_TREE);
6681 /* If possible, express the comparison in the shorter mode. */
6682 if ((code == EQ_EXPR || code == NE_EXPR
6683 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6684 && (TREE_TYPE (arg1_unw) == shorter_type
6685 || ((TYPE_PRECISION (shorter_type)
6686 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6687 && (TYPE_UNSIGNED (shorter_type)
6688 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6689 || (TREE_CODE (arg1_unw) == INTEGER_CST
6690 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6691 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6692 && int_fits_type_p (arg1_unw, shorter_type))))
6693 return fold_build2_loc (loc, code, type, arg0_unw,
6694 fold_convert_loc (loc, shorter_type, arg1_unw));
6696 if (TREE_CODE (arg1_unw) != INTEGER_CST
6697 || TREE_CODE (shorter_type) != INTEGER_TYPE
6698 || !int_fits_type_p (arg1_unw, shorter_type))
6701 /* If we are comparing with the integer that does not fit into the range
6702 of the shorter type, the result is known. */
6703 outer_type = TREE_TYPE (arg1_unw);
6704 min = lower_bound_in_type (outer_type, shorter_type);
6705 max = upper_bound_in_type (outer_type, shorter_type);
6707 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6709 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6716 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6721 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6727 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6729 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6734 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6736 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6745 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6746 ARG0 just the signedness is changed. */
6749 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6750 tree arg0, tree arg1)
6753 tree inner_type, outer_type;
6755 if (!CONVERT_EXPR_P (arg0))
6758 outer_type = TREE_TYPE (arg0);
6759 arg0_inner = TREE_OPERAND (arg0, 0);
6760 inner_type = TREE_TYPE (arg0_inner);
6762 #ifdef HAVE_canonicalize_funcptr_for_compare
6763 /* Disable this optimization if we're casting a function pointer
6764 type on targets that require function pointer canonicalization. */
6765 if (HAVE_canonicalize_funcptr_for_compare
6766 && TREE_CODE (inner_type) == POINTER_TYPE
6767 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6771 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6774 if (TREE_CODE (arg1) != INTEGER_CST
6775 && !(CONVERT_EXPR_P (arg1)
6776 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6779 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6784 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6787 if (TREE_CODE (arg1) == INTEGER_CST)
6788 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6789 0, TREE_OVERFLOW (arg1));
6791 arg1 = fold_convert_loc (loc, inner_type, arg1);
6793 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6796 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6797 step of the array. Reconstructs s and delta in the case of s *
6798 delta being an integer constant (and thus already folded). ADDR is
6799 the address. MULT is the multiplicative expression. If the
6800 function succeeds, the new address expression is returned.
6801 Otherwise NULL_TREE is returned. LOC is the location of the
6802 resulting expression. */
6805 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6807 tree s, delta, step;
6808 tree ref = TREE_OPERAND (addr, 0), pref;
6813 /* Strip the nops that might be added when converting op1 to sizetype. */
6816 /* Canonicalize op1 into a possibly non-constant delta
6817 and an INTEGER_CST s. */
6818 if (TREE_CODE (op1) == MULT_EXPR)
6820 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6825 if (TREE_CODE (arg0) == INTEGER_CST)
6830 else if (TREE_CODE (arg1) == INTEGER_CST)
6838 else if (TREE_CODE (op1) == INTEGER_CST)
6845 /* Simulate we are delta * 1. */
6847 s = integer_one_node;
6850 for (;; ref = TREE_OPERAND (ref, 0))
6852 if (TREE_CODE (ref) == ARRAY_REF)
6856 /* Remember if this was a multi-dimensional array. */
6857 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6860 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6863 itype = TREE_TYPE (domain);
6865 step = array_ref_element_size (ref);
6866 if (TREE_CODE (step) != INTEGER_CST)
6871 if (! tree_int_cst_equal (step, s))
6876 /* Try if delta is a multiple of step. */
6877 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6883 /* Only fold here if we can verify we do not overflow one
6884 dimension of a multi-dimensional array. */
6889 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6890 || !TYPE_MAX_VALUE (domain)
6891 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6894 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6895 fold_convert_loc (loc, itype,
6896 TREE_OPERAND (ref, 1)),
6897 fold_convert_loc (loc, itype, delta));
6899 || TREE_CODE (tmp) != INTEGER_CST
6900 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6909 if (!handled_component_p (ref))
6913 /* We found the suitable array reference. So copy everything up to it,
6914 and replace the index. */
6916 pref = TREE_OPERAND (addr, 0);
6917 ret = copy_node (pref);
6918 SET_EXPR_LOCATION (ret, loc);
6923 pref = TREE_OPERAND (pref, 0);
6924 TREE_OPERAND (pos, 0) = copy_node (pref);
6925 pos = TREE_OPERAND (pos, 0);
6928 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6929 fold_convert_loc (loc, itype,
6930 TREE_OPERAND (pos, 1)),
6931 fold_convert_loc (loc, itype, delta));
6933 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6937 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6938 means A >= Y && A != MAX, but in this case we know that
6939 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6942 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6944 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6946 if (TREE_CODE (bound) == LT_EXPR)
6947 a = TREE_OPERAND (bound, 0);
6948 else if (TREE_CODE (bound) == GT_EXPR)
6949 a = TREE_OPERAND (bound, 1);
6953 typea = TREE_TYPE (a);
6954 if (!INTEGRAL_TYPE_P (typea)
6955 && !POINTER_TYPE_P (typea))
6958 if (TREE_CODE (ineq) == LT_EXPR)
6960 a1 = TREE_OPERAND (ineq, 1);
6961 y = TREE_OPERAND (ineq, 0);
6963 else if (TREE_CODE (ineq) == GT_EXPR)
6965 a1 = TREE_OPERAND (ineq, 0);
6966 y = TREE_OPERAND (ineq, 1);
6971 if (TREE_TYPE (a1) != typea)
6974 if (POINTER_TYPE_P (typea))
6976 /* Convert the pointer types into integer before taking the difference. */
6977 tree ta = fold_convert_loc (loc, ssizetype, a);
6978 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6979 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6982 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6984 if (!diff || !integer_onep (diff))
6987 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6990 /* Fold a sum or difference of at least one multiplication.
6991 Returns the folded tree or NULL if no simplification could be made. */
6994 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6995 tree arg0, tree arg1)
6997 tree arg00, arg01, arg10, arg11;
6998 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7000 /* (A * C) +- (B * C) -> (A+-B) * C.
7001 (A * C) +- A -> A * (C+-1).
7002 We are most concerned about the case where C is a constant,
7003 but other combinations show up during loop reduction. Since
7004 it is not difficult, try all four possibilities. */
7006 if (TREE_CODE (arg0) == MULT_EXPR)
7008 arg00 = TREE_OPERAND (arg0, 0);
7009 arg01 = TREE_OPERAND (arg0, 1);
7011 else if (TREE_CODE (arg0) == INTEGER_CST)
7013 arg00 = build_one_cst (type);
7018 /* We cannot generate constant 1 for fract. */
7019 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7022 arg01 = build_one_cst (type);
7024 if (TREE_CODE (arg1) == MULT_EXPR)
7026 arg10 = TREE_OPERAND (arg1, 0);
7027 arg11 = TREE_OPERAND (arg1, 1);
7029 else if (TREE_CODE (arg1) == INTEGER_CST)
7031 arg10 = build_one_cst (type);
7032 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7033 the purpose of this canonicalization. */
7034 if (TREE_INT_CST_HIGH (arg1) == -1
7035 && negate_expr_p (arg1)
7036 && code == PLUS_EXPR)
7038 arg11 = negate_expr (arg1);
7046 /* We cannot generate constant 1 for fract. */
7047 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7050 arg11 = build_one_cst (type);
7054 if (operand_equal_p (arg01, arg11, 0))
7055 same = arg01, alt0 = arg00, alt1 = arg10;
7056 else if (operand_equal_p (arg00, arg10, 0))
7057 same = arg00, alt0 = arg01, alt1 = arg11;
7058 else if (operand_equal_p (arg00, arg11, 0))
7059 same = arg00, alt0 = arg01, alt1 = arg10;
7060 else if (operand_equal_p (arg01, arg10, 0))
7061 same = arg01, alt0 = arg00, alt1 = arg11;
7063 /* No identical multiplicands; see if we can find a common
7064 power-of-two factor in non-power-of-two multiplies. This
7065 can help in multi-dimensional array access. */
7066 else if (host_integerp (arg01, 0)
7067 && host_integerp (arg11, 0))
7069 HOST_WIDE_INT int01, int11, tmp;
7072 int01 = TREE_INT_CST_LOW (arg01);
7073 int11 = TREE_INT_CST_LOW (arg11);
7075 /* Move min of absolute values to int11. */
7076 if ((int01 >= 0 ? int01 : -int01)
7077 < (int11 >= 0 ? int11 : -int11))
7079 tmp = int01, int01 = int11, int11 = tmp;
7080 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7087 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7088 /* The remainder should not be a constant, otherwise we
7089 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7090 increased the number of multiplications necessary. */
7091 && TREE_CODE (arg10) != INTEGER_CST)
7093 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7094 build_int_cst (TREE_TYPE (arg00),
7099 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7104 return fold_build2_loc (loc, MULT_EXPR, type,
7105 fold_build2_loc (loc, code, type,
7106 fold_convert_loc (loc, type, alt0),
7107 fold_convert_loc (loc, type, alt1)),
7108 fold_convert_loc (loc, type, same));
7113 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7114 specified by EXPR into the buffer PTR of length LEN bytes.
7115 Return the number of bytes placed in the buffer, or zero
7119 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7121 tree type = TREE_TYPE (expr);
7122 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7123 int byte, offset, word, words;
7124 unsigned char value;
7126 if (total_bytes > len)
7128 words = total_bytes / UNITS_PER_WORD;
7130 for (byte = 0; byte < total_bytes; byte++)
7132 int bitpos = byte * BITS_PER_UNIT;
7133 if (bitpos < HOST_BITS_PER_WIDE_INT)
7134 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7136 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7137 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7139 if (total_bytes > UNITS_PER_WORD)
7141 word = byte / UNITS_PER_WORD;
7142 if (WORDS_BIG_ENDIAN)
7143 word = (words - 1) - word;
7144 offset = word * UNITS_PER_WORD;
7145 if (BYTES_BIG_ENDIAN)
7146 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7148 offset += byte % UNITS_PER_WORD;
7151 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7152 ptr[offset] = value;
7158 /* Subroutine of native_encode_expr. Encode the REAL_CST
7159 specified by EXPR into the buffer PTR of length LEN bytes.
7160 Return the number of bytes placed in the buffer, or zero
7164 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7166 tree type = TREE_TYPE (expr);
7167 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7168 int byte, offset, word, words, bitpos;
7169 unsigned char value;
7171 /* There are always 32 bits in each long, no matter the size of
7172 the hosts long. We handle floating point representations with
7176 if (total_bytes > len)
7178 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7180 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7182 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7183 bitpos += BITS_PER_UNIT)
7185 byte = (bitpos / BITS_PER_UNIT) & 3;
7186 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7188 if (UNITS_PER_WORD < 4)
7190 word = byte / UNITS_PER_WORD;
7191 if (WORDS_BIG_ENDIAN)
7192 word = (words - 1) - word;
7193 offset = word * UNITS_PER_WORD;
7194 if (BYTES_BIG_ENDIAN)
7195 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7197 offset += byte % UNITS_PER_WORD;
7200 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7201 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7206 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7207 specified by EXPR into the buffer PTR of length LEN bytes.
7208 Return the number of bytes placed in the buffer, or zero
7212 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7217 part = TREE_REALPART (expr);
7218 rsize = native_encode_expr (part, ptr, len);
7221 part = TREE_IMAGPART (expr);
7222 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7225 return rsize + isize;
7229 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7230 specified by EXPR into the buffer PTR of length LEN bytes.
7231 Return the number of bytes placed in the buffer, or zero
7235 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7237 int i, size, offset, count;
7238 tree itype, elem, elements;
7241 elements = TREE_VECTOR_CST_ELTS (expr);
7242 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7243 itype = TREE_TYPE (TREE_TYPE (expr));
7244 size = GET_MODE_SIZE (TYPE_MODE (itype));
7245 for (i = 0; i < count; i++)
7249 elem = TREE_VALUE (elements);
7250 elements = TREE_CHAIN (elements);
7257 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7262 if (offset + size > len)
7264 memset (ptr+offset, 0, size);
7272 /* Subroutine of native_encode_expr. Encode the STRING_CST
7273 specified by EXPR into the buffer PTR of length LEN bytes.
7274 Return the number of bytes placed in the buffer, or zero
7278 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7280 tree type = TREE_TYPE (expr);
7281 HOST_WIDE_INT total_bytes;
7283 if (TREE_CODE (type) != ARRAY_TYPE
7284 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7285 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7286 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7288 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7289 if (total_bytes > len)
7291 if (TREE_STRING_LENGTH (expr) < total_bytes)
7293 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7294 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7295 total_bytes - TREE_STRING_LENGTH (expr));
7298 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7303 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7304 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7305 buffer PTR of length LEN bytes. Return the number of bytes
7306 placed in the buffer, or zero upon failure. */
7309 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7311 switch (TREE_CODE (expr))
7314 return native_encode_int (expr, ptr, len);
7317 return native_encode_real (expr, ptr, len);
7320 return native_encode_complex (expr, ptr, len);
7323 return native_encode_vector (expr, ptr, len);
7326 return native_encode_string (expr, ptr, len);
7334 /* Subroutine of native_interpret_expr. Interpret the contents of
7335 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7336 If the buffer cannot be interpreted, return NULL_TREE. */
7339 native_interpret_int (tree type, const unsigned char *ptr, int len)
7341 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7342 int byte, offset, word, words;
7343 unsigned char value;
7346 if (total_bytes > len)
7348 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7351 result = double_int_zero;
7352 words = total_bytes / UNITS_PER_WORD;
7354 for (byte = 0; byte < total_bytes; byte++)
7356 int bitpos = byte * BITS_PER_UNIT;
7357 if (total_bytes > UNITS_PER_WORD)
7359 word = byte / UNITS_PER_WORD;
7360 if (WORDS_BIG_ENDIAN)
7361 word = (words - 1) - word;
7362 offset = word * UNITS_PER_WORD;
7363 if (BYTES_BIG_ENDIAN)
7364 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7366 offset += byte % UNITS_PER_WORD;
7369 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7370 value = ptr[offset];
7372 if (bitpos < HOST_BITS_PER_WIDE_INT)
7373 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7375 result.high |= (unsigned HOST_WIDE_INT) value
7376 << (bitpos - HOST_BITS_PER_WIDE_INT);
7379 return double_int_to_tree (type, result);
7383 /* Subroutine of native_interpret_expr. Interpret the contents of
7384 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7385 If the buffer cannot be interpreted, return NULL_TREE. */
7388 native_interpret_real (tree type, const unsigned char *ptr, int len)
7390 enum machine_mode mode = TYPE_MODE (type);
7391 int total_bytes = GET_MODE_SIZE (mode);
7392 int byte, offset, word, words, bitpos;
7393 unsigned char value;
7394 /* There are always 32 bits in each long, no matter the size of
7395 the hosts long. We handle floating point representations with
7400 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7401 if (total_bytes > len || total_bytes > 24)
7403 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7405 memset (tmp, 0, sizeof (tmp));
7406 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7407 bitpos += BITS_PER_UNIT)
7409 byte = (bitpos / BITS_PER_UNIT) & 3;
7410 if (UNITS_PER_WORD < 4)
7412 word = byte / UNITS_PER_WORD;
7413 if (WORDS_BIG_ENDIAN)
7414 word = (words - 1) - word;
7415 offset = word * UNITS_PER_WORD;
7416 if (BYTES_BIG_ENDIAN)
7417 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7419 offset += byte % UNITS_PER_WORD;
7422 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7423 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7425 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7428 real_from_target (&r, tmp, mode);
7429 return build_real (type, r);
7433 /* Subroutine of native_interpret_expr. Interpret the contents of
7434 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7435 If the buffer cannot be interpreted, return NULL_TREE. */
7438 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7440 tree etype, rpart, ipart;
7443 etype = TREE_TYPE (type);
7444 size = GET_MODE_SIZE (TYPE_MODE (etype));
7447 rpart = native_interpret_expr (etype, ptr, size);
7450 ipart = native_interpret_expr (etype, ptr+size, size);
7453 return build_complex (type, rpart, ipart);
7457 /* Subroutine of native_interpret_expr. Interpret the contents of
7458 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7459 If the buffer cannot be interpreted, return NULL_TREE. */
7462 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7464 tree etype, elem, elements;
7467 etype = TREE_TYPE (type);
7468 size = GET_MODE_SIZE (TYPE_MODE (etype));
7469 count = TYPE_VECTOR_SUBPARTS (type);
7470 if (size * count > len)
7473 elements = NULL_TREE;
7474 for (i = count - 1; i >= 0; i--)
7476 elem = native_interpret_expr (etype, ptr+(i*size), size);
7479 elements = tree_cons (NULL_TREE, elem, elements);
7481 return build_vector (type, elements);
7485 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7486 the buffer PTR of length LEN as a constant of type TYPE. For
7487 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7488 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7489 return NULL_TREE. */
7492 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7494 switch (TREE_CODE (type))
7499 return native_interpret_int (type, ptr, len);
7502 return native_interpret_real (type, ptr, len);
7505 return native_interpret_complex (type, ptr, len);
7508 return native_interpret_vector (type, ptr, len);
7516 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7517 TYPE at compile-time. If we're unable to perform the conversion
7518 return NULL_TREE. */
7521 fold_view_convert_expr (tree type, tree expr)
7523 /* We support up to 512-bit values (for V8DFmode). */
7524 unsigned char buffer[64];
7527 /* Check that the host and target are sane. */
7528 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7531 len = native_encode_expr (expr, buffer, sizeof (buffer));
7535 return native_interpret_expr (type, buffer, len);
7538 /* Build an expression for the address of T. Folds away INDIRECT_REF
7539 to avoid confusing the gimplify process. */
7542 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7544 /* The size of the object is not relevant when talking about its address. */
7545 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7546 t = TREE_OPERAND (t, 0);
7548 if (TREE_CODE (t) == INDIRECT_REF)
7550 t = TREE_OPERAND (t, 0);
7552 if (TREE_TYPE (t) != ptrtype)
7553 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7555 else if (TREE_CODE (t) == MEM_REF
7556 && integer_zerop (TREE_OPERAND (t, 1)))
7557 return TREE_OPERAND (t, 0);
7558 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7560 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7562 if (TREE_TYPE (t) != ptrtype)
7563 t = fold_convert_loc (loc, ptrtype, t);
7566 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7571 /* Build an expression for the address of T. */
7574 build_fold_addr_expr_loc (location_t loc, tree t)
7576 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7578 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7581 /* Fold a unary expression of code CODE and type TYPE with operand
7582 OP0. Return the folded expression if folding is successful.
7583 Otherwise, return NULL_TREE. */
7586 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7590 enum tree_code_class kind = TREE_CODE_CLASS (code);
7592 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7593 && TREE_CODE_LENGTH (code) == 1);
7598 if (CONVERT_EXPR_CODE_P (code)
7599 || code == FLOAT_EXPR || code == ABS_EXPR)
7601 /* Don't use STRIP_NOPS, because signedness of argument type
7603 STRIP_SIGN_NOPS (arg0);
7607 /* Strip any conversions that don't change the mode. This
7608 is safe for every expression, except for a comparison
7609 expression because its signedness is derived from its
7612 Note that this is done as an internal manipulation within
7613 the constant folder, in order to find the simplest
7614 representation of the arguments so that their form can be
7615 studied. In any cases, the appropriate type conversions
7616 should be put back in the tree that will get out of the
7622 if (TREE_CODE_CLASS (code) == tcc_unary)
7624 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7625 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7626 fold_build1_loc (loc, code, type,
7627 fold_convert_loc (loc, TREE_TYPE (op0),
7628 TREE_OPERAND (arg0, 1))));
7629 else if (TREE_CODE (arg0) == COND_EXPR)
7631 tree arg01 = TREE_OPERAND (arg0, 1);
7632 tree arg02 = TREE_OPERAND (arg0, 2);
7633 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7634 arg01 = fold_build1_loc (loc, code, type,
7635 fold_convert_loc (loc,
7636 TREE_TYPE (op0), arg01));
7637 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7638 arg02 = fold_build1_loc (loc, code, type,
7639 fold_convert_loc (loc,
7640 TREE_TYPE (op0), arg02));
7641 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7644 /* If this was a conversion, and all we did was to move into
7645 inside the COND_EXPR, bring it back out. But leave it if
7646 it is a conversion from integer to integer and the
7647 result precision is no wider than a word since such a
7648 conversion is cheap and may be optimized away by combine,
7649 while it couldn't if it were outside the COND_EXPR. Then return
7650 so we don't get into an infinite recursion loop taking the
7651 conversion out and then back in. */
7653 if ((CONVERT_EXPR_CODE_P (code)
7654 || code == NON_LVALUE_EXPR)
7655 && TREE_CODE (tem) == COND_EXPR
7656 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7657 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7658 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7659 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7660 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7661 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7662 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7664 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7665 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7666 || flag_syntax_only))
7667 tem = build1_loc (loc, code, type,
7669 TREE_TYPE (TREE_OPERAND
7670 (TREE_OPERAND (tem, 1), 0)),
7671 TREE_OPERAND (tem, 0),
7672 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7673 TREE_OPERAND (TREE_OPERAND (tem, 2),
7677 else if (COMPARISON_CLASS_P (arg0))
7679 if (TREE_CODE (type) == BOOLEAN_TYPE)
7681 arg0 = copy_node (arg0);
7682 TREE_TYPE (arg0) = type;
7685 else if (TREE_CODE (type) != INTEGER_TYPE)
7686 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7687 fold_build1_loc (loc, code, type,
7689 fold_build1_loc (loc, code, type,
7690 integer_zero_node));
7697 /* Re-association barriers around constants and other re-association
7698 barriers can be removed. */
7699 if (CONSTANT_CLASS_P (op0)
7700 || TREE_CODE (op0) == PAREN_EXPR)
7701 return fold_convert_loc (loc, type, op0);
7706 case FIX_TRUNC_EXPR:
7707 if (TREE_TYPE (op0) == type)
7710 /* If we have (type) (a CMP b) and type is an integral type, return
7711 new expression involving the new type. */
7712 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7713 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7714 TREE_OPERAND (op0, 1));
7716 /* Handle cases of two conversions in a row. */
7717 if (CONVERT_EXPR_P (op0))
7719 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7720 tree inter_type = TREE_TYPE (op0);
7721 int inside_int = INTEGRAL_TYPE_P (inside_type);
7722 int inside_ptr = POINTER_TYPE_P (inside_type);
7723 int inside_float = FLOAT_TYPE_P (inside_type);
7724 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7725 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7726 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7727 int inter_int = INTEGRAL_TYPE_P (inter_type);
7728 int inter_ptr = POINTER_TYPE_P (inter_type);
7729 int inter_float = FLOAT_TYPE_P (inter_type);
7730 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7731 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7732 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7733 int final_int = INTEGRAL_TYPE_P (type);
7734 int final_ptr = POINTER_TYPE_P (type);
7735 int final_float = FLOAT_TYPE_P (type);
7736 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7737 unsigned int final_prec = TYPE_PRECISION (type);
7738 int final_unsignedp = TYPE_UNSIGNED (type);
7740 /* In addition to the cases of two conversions in a row
7741 handled below, if we are converting something to its own
7742 type via an object of identical or wider precision, neither
7743 conversion is needed. */
7744 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7745 && (((inter_int || inter_ptr) && final_int)
7746 || (inter_float && final_float))
7747 && inter_prec >= final_prec)
7748 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7750 /* Likewise, if the intermediate and initial types are either both
7751 float or both integer, we don't need the middle conversion if the
7752 former is wider than the latter and doesn't change the signedness
7753 (for integers). Avoid this if the final type is a pointer since
7754 then we sometimes need the middle conversion. Likewise if the
7755 final type has a precision not equal to the size of its mode. */
7756 if (((inter_int && inside_int)
7757 || (inter_float && inside_float)
7758 || (inter_vec && inside_vec))
7759 && inter_prec >= inside_prec
7760 && (inter_float || inter_vec
7761 || inter_unsignedp == inside_unsignedp)
7762 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7763 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7765 && (! final_vec || inter_prec == inside_prec))
7766 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7768 /* If we have a sign-extension of a zero-extended value, we can
7769 replace that by a single zero-extension. */
7770 if (inside_int && inter_int && final_int
7771 && inside_prec < inter_prec && inter_prec < final_prec
7772 && inside_unsignedp && !inter_unsignedp)
7773 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7775 /* Two conversions in a row are not needed unless:
7776 - some conversion is floating-point (overstrict for now), or
7777 - some conversion is a vector (overstrict for now), or
7778 - the intermediate type is narrower than both initial and
7780 - the intermediate type and innermost type differ in signedness,
7781 and the outermost type is wider than the intermediate, or
7782 - the initial type is a pointer type and the precisions of the
7783 intermediate and final types differ, or
7784 - the final type is a pointer type and the precisions of the
7785 initial and intermediate types differ. */
7786 if (! inside_float && ! inter_float && ! final_float
7787 && ! inside_vec && ! inter_vec && ! final_vec
7788 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7789 && ! (inside_int && inter_int
7790 && inter_unsignedp != inside_unsignedp
7791 && inter_prec < final_prec)
7792 && ((inter_unsignedp && inter_prec > inside_prec)
7793 == (final_unsignedp && final_prec > inter_prec))
7794 && ! (inside_ptr && inter_prec != final_prec)
7795 && ! (final_ptr && inside_prec != inter_prec)
7796 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7797 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7798 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7801 /* Handle (T *)&A.B.C for A being of type T and B and C
7802 living at offset zero. This occurs frequently in
7803 C++ upcasting and then accessing the base. */
7804 if (TREE_CODE (op0) == ADDR_EXPR
7805 && POINTER_TYPE_P (type)
7806 && handled_component_p (TREE_OPERAND (op0, 0)))
7808 HOST_WIDE_INT bitsize, bitpos;
7810 enum machine_mode mode;
7811 int unsignedp, volatilep;
7812 tree base = TREE_OPERAND (op0, 0);
7813 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7814 &mode, &unsignedp, &volatilep, false);
7815 /* If the reference was to a (constant) zero offset, we can use
7816 the address of the base if it has the same base type
7817 as the result type and the pointer type is unqualified. */
7818 if (! offset && bitpos == 0
7819 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7820 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7821 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7822 return fold_convert_loc (loc, type,
7823 build_fold_addr_expr_loc (loc, base));
7826 if (TREE_CODE (op0) == MODIFY_EXPR
7827 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7828 /* Detect assigning a bitfield. */
7829 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7831 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7833 /* Don't leave an assignment inside a conversion
7834 unless assigning a bitfield. */
7835 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7836 /* First do the assignment, then return converted constant. */
7837 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7838 TREE_NO_WARNING (tem) = 1;
7839 TREE_USED (tem) = 1;
7843 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7844 constants (if x has signed type, the sign bit cannot be set
7845 in c). This folds extension into the BIT_AND_EXPR.
7846 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7847 very likely don't have maximal range for their precision and this
7848 transformation effectively doesn't preserve non-maximal ranges. */
7849 if (TREE_CODE (type) == INTEGER_TYPE
7850 && TREE_CODE (op0) == BIT_AND_EXPR
7851 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7853 tree and_expr = op0;
7854 tree and0 = TREE_OPERAND (and_expr, 0);
7855 tree and1 = TREE_OPERAND (and_expr, 1);
7858 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7859 || (TYPE_PRECISION (type)
7860 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7862 else if (TYPE_PRECISION (TREE_TYPE (and1))
7863 <= HOST_BITS_PER_WIDE_INT
7864 && host_integerp (and1, 1))
7866 unsigned HOST_WIDE_INT cst;
7868 cst = tree_low_cst (and1, 1);
7869 cst &= (HOST_WIDE_INT) -1
7870 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7871 change = (cst == 0);
7872 #ifdef LOAD_EXTEND_OP
7874 && !flag_syntax_only
7875 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7878 tree uns = unsigned_type_for (TREE_TYPE (and0));
7879 and0 = fold_convert_loc (loc, uns, and0);
7880 and1 = fold_convert_loc (loc, uns, and1);
7886 tem = force_fit_type_double (type, tree_to_double_int (and1),
7887 0, TREE_OVERFLOW (and1));
7888 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7889 fold_convert_loc (loc, type, and0), tem);
7893 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7894 when one of the new casts will fold away. Conservatively we assume
7895 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7896 if (POINTER_TYPE_P (type)
7897 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7898 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7899 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7900 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7902 tree arg00 = TREE_OPERAND (arg0, 0);
7903 tree arg01 = TREE_OPERAND (arg0, 1);
7905 return fold_build2_loc (loc,
7906 TREE_CODE (arg0), type,
7907 fold_convert_loc (loc, type, arg00),
7908 fold_convert_loc (loc, sizetype, arg01));
7911 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7912 of the same precision, and X is an integer type not narrower than
7913 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7914 if (INTEGRAL_TYPE_P (type)
7915 && TREE_CODE (op0) == BIT_NOT_EXPR
7916 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7917 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7918 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7920 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7921 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7922 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7923 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7924 fold_convert_loc (loc, type, tem));
7927 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7928 type of X and Y (integer types only). */
7929 if (INTEGRAL_TYPE_P (type)
7930 && TREE_CODE (op0) == MULT_EXPR
7931 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7932 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7934 /* Be careful not to introduce new overflows. */
7936 if (TYPE_OVERFLOW_WRAPS (type))
7939 mult_type = unsigned_type_for (type);
7941 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7943 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7944 fold_convert_loc (loc, mult_type,
7945 TREE_OPERAND (op0, 0)),
7946 fold_convert_loc (loc, mult_type,
7947 TREE_OPERAND (op0, 1)));
7948 return fold_convert_loc (loc, type, tem);
7952 tem = fold_convert_const (code, type, op0);
7953 return tem ? tem : NULL_TREE;
7955 case ADDR_SPACE_CONVERT_EXPR:
7956 if (integer_zerop (arg0))
7957 return fold_convert_const (code, type, arg0);
7960 case FIXED_CONVERT_EXPR:
7961 tem = fold_convert_const (code, type, arg0);
7962 return tem ? tem : NULL_TREE;
7964 case VIEW_CONVERT_EXPR:
7965 if (TREE_TYPE (op0) == type)
7967 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7968 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7969 type, TREE_OPERAND (op0, 0));
7970 if (TREE_CODE (op0) == MEM_REF)
7971 return fold_build2_loc (loc, MEM_REF, type,
7972 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7974 /* For integral conversions with the same precision or pointer
7975 conversions use a NOP_EXPR instead. */
7976 if ((INTEGRAL_TYPE_P (type)
7977 || POINTER_TYPE_P (type))
7978 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7979 || POINTER_TYPE_P (TREE_TYPE (op0)))
7980 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7981 return fold_convert_loc (loc, type, op0);
7983 /* Strip inner integral conversions that do not change the precision. */
7984 if (CONVERT_EXPR_P (op0)
7985 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7986 || POINTER_TYPE_P (TREE_TYPE (op0)))
7987 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7988 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7989 && (TYPE_PRECISION (TREE_TYPE (op0))
7990 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7991 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7992 type, TREE_OPERAND (op0, 0));
7994 return fold_view_convert_expr (type, op0);
7997 tem = fold_negate_expr (loc, arg0);
7999 return fold_convert_loc (loc, type, tem);
8003 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8004 return fold_abs_const (arg0, type);
8005 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8006 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8007 /* Convert fabs((double)float) into (double)fabsf(float). */
8008 else if (TREE_CODE (arg0) == NOP_EXPR
8009 && TREE_CODE (type) == REAL_TYPE)
8011 tree targ0 = strip_float_extensions (arg0);
8013 return fold_convert_loc (loc, type,
8014 fold_build1_loc (loc, ABS_EXPR,
8018 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8019 else if (TREE_CODE (arg0) == ABS_EXPR)
8021 else if (tree_expr_nonnegative_p (arg0))
8024 /* Strip sign ops from argument. */
8025 if (TREE_CODE (type) == REAL_TYPE)
8027 tem = fold_strip_sign_ops (arg0);
8029 return fold_build1_loc (loc, ABS_EXPR, type,
8030 fold_convert_loc (loc, type, tem));
8035 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8036 return fold_convert_loc (loc, type, arg0);
8037 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8039 tree itype = TREE_TYPE (type);
8040 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8041 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8042 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8043 negate_expr (ipart));
8045 if (TREE_CODE (arg0) == COMPLEX_CST)
8047 tree itype = TREE_TYPE (type);
8048 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8049 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8050 return build_complex (type, rpart, negate_expr (ipart));
8052 if (TREE_CODE (arg0) == CONJ_EXPR)
8053 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8057 if (TREE_CODE (arg0) == INTEGER_CST)
8058 return fold_not_const (arg0, type);
8059 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8060 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8061 /* Convert ~ (-A) to A - 1. */
8062 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8063 return fold_build2_loc (loc, MINUS_EXPR, type,
8064 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8065 build_int_cst (type, 1));
8066 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8067 else if (INTEGRAL_TYPE_P (type)
8068 && ((TREE_CODE (arg0) == MINUS_EXPR
8069 && integer_onep (TREE_OPERAND (arg0, 1)))
8070 || (TREE_CODE (arg0) == PLUS_EXPR
8071 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8072 return fold_build1_loc (loc, NEGATE_EXPR, type,
8073 fold_convert_loc (loc, type,
8074 TREE_OPERAND (arg0, 0)));
8075 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8076 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8077 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8078 fold_convert_loc (loc, type,
8079 TREE_OPERAND (arg0, 0)))))
8080 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8081 fold_convert_loc (loc, type,
8082 TREE_OPERAND (arg0, 1)));
8083 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8084 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8085 fold_convert_loc (loc, type,
8086 TREE_OPERAND (arg0, 1)))))
8087 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8088 fold_convert_loc (loc, type,
8089 TREE_OPERAND (arg0, 0)), tem);
8090 /* Perform BIT_NOT_EXPR on each element individually. */
8091 else if (TREE_CODE (arg0) == VECTOR_CST)
8093 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8094 int count = TYPE_VECTOR_SUBPARTS (type), i;
8096 for (i = 0; i < count; i++)
8100 elem = TREE_VALUE (elements);
8101 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8102 if (elem == NULL_TREE)
8104 elements = TREE_CHAIN (elements);
8107 elem = build_int_cst (TREE_TYPE (type), -1);
8108 list = tree_cons (NULL_TREE, elem, list);
8111 return build_vector (type, nreverse (list));
8116 case TRUTH_NOT_EXPR:
8117 /* The argument to invert_truthvalue must have Boolean type. */
8118 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8119 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8121 /* Note that the operand of this must be an int
8122 and its values must be 0 or 1.
8123 ("true" is a fixed value perhaps depending on the language,
8124 but we don't handle values other than 1 correctly yet.) */
8125 tem = fold_truth_not_expr (loc, arg0);
8128 return fold_convert_loc (loc, type, tem);
8131 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8132 return fold_convert_loc (loc, type, arg0);
8133 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8134 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8135 TREE_OPERAND (arg0, 1));
8136 if (TREE_CODE (arg0) == COMPLEX_CST)
8137 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8138 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8140 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8141 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8142 fold_build1_loc (loc, REALPART_EXPR, itype,
8143 TREE_OPERAND (arg0, 0)),
8144 fold_build1_loc (loc, REALPART_EXPR, itype,
8145 TREE_OPERAND (arg0, 1)));
8146 return fold_convert_loc (loc, type, tem);
8148 if (TREE_CODE (arg0) == CONJ_EXPR)
8150 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8151 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8152 TREE_OPERAND (arg0, 0));
8153 return fold_convert_loc (loc, type, tem);
8155 if (TREE_CODE (arg0) == CALL_EXPR)
8157 tree fn = get_callee_fndecl (arg0);
8158 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8159 switch (DECL_FUNCTION_CODE (fn))
8161 CASE_FLT_FN (BUILT_IN_CEXPI):
8162 fn = mathfn_built_in (type, BUILT_IN_COS);
8164 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8174 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8175 return build_zero_cst (type);
8176 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8177 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8178 TREE_OPERAND (arg0, 0));
8179 if (TREE_CODE (arg0) == COMPLEX_CST)
8180 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8181 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8183 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8184 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8185 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8186 TREE_OPERAND (arg0, 0)),
8187 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8188 TREE_OPERAND (arg0, 1)));
8189 return fold_convert_loc (loc, type, tem);
8191 if (TREE_CODE (arg0) == CONJ_EXPR)
8193 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8194 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8195 return fold_convert_loc (loc, type, negate_expr (tem));
8197 if (TREE_CODE (arg0) == CALL_EXPR)
8199 tree fn = get_callee_fndecl (arg0);
8200 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8201 switch (DECL_FUNCTION_CODE (fn))
8203 CASE_FLT_FN (BUILT_IN_CEXPI):
8204 fn = mathfn_built_in (type, BUILT_IN_SIN);
8206 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8216 /* Fold *&X to X if X is an lvalue. */
8217 if (TREE_CODE (op0) == ADDR_EXPR)
8219 tree op00 = TREE_OPERAND (op0, 0);
8220 if ((TREE_CODE (op00) == VAR_DECL
8221 || TREE_CODE (op00) == PARM_DECL
8222 || TREE_CODE (op00) == RESULT_DECL)
8223 && !TREE_READONLY (op00))
8230 } /* switch (code) */
8234 /* If the operation was a conversion do _not_ mark a resulting constant
8235 with TREE_OVERFLOW if the original constant was not. These conversions
8236 have implementation defined behavior and retaining the TREE_OVERFLOW
8237 flag here would confuse later passes such as VRP. */
8239 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8240 tree type, tree op0)
8242 tree res = fold_unary_loc (loc, code, type, op0);
8244 && TREE_CODE (res) == INTEGER_CST
8245 && TREE_CODE (op0) == INTEGER_CST
8246 && CONVERT_EXPR_CODE_P (code))
8247 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8252 /* Fold a binary expression of code CODE and type TYPE with operands
8253 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8254 Return the folded expression if folding is successful. Otherwise,
8255 return NULL_TREE. */
8258 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8260 enum tree_code compl_code;
8262 if (code == MIN_EXPR)
8263 compl_code = MAX_EXPR;
8264 else if (code == MAX_EXPR)
8265 compl_code = MIN_EXPR;
8269 /* MIN (MAX (a, b), b) == b. */
8270 if (TREE_CODE (op0) == compl_code
8271 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8272 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8274 /* MIN (MAX (b, a), b) == b. */
8275 if (TREE_CODE (op0) == compl_code
8276 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8277 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8278 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8280 /* MIN (a, MAX (a, b)) == a. */
8281 if (TREE_CODE (op1) == compl_code
8282 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8283 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8284 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8286 /* MIN (a, MAX (b, a)) == a. */
8287 if (TREE_CODE (op1) == compl_code
8288 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8289 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8290 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8295 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8296 by changing CODE to reduce the magnitude of constants involved in
8297 ARG0 of the comparison.
8298 Returns a canonicalized comparison tree if a simplification was
8299 possible, otherwise returns NULL_TREE.
8300 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8301 valid if signed overflow is undefined. */
8304 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8305 tree arg0, tree arg1,
8306 bool *strict_overflow_p)
8308 enum tree_code code0 = TREE_CODE (arg0);
8309 tree t, cst0 = NULL_TREE;
8313 /* Match A +- CST code arg1 and CST code arg1. We can change the
8314 first form only if overflow is undefined. */
8315 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8316 /* In principle pointers also have undefined overflow behavior,
8317 but that causes problems elsewhere. */
8318 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8319 && (code0 == MINUS_EXPR
8320 || code0 == PLUS_EXPR)
8321 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8322 || code0 == INTEGER_CST))
8325 /* Identify the constant in arg0 and its sign. */
8326 if (code0 == INTEGER_CST)
8329 cst0 = TREE_OPERAND (arg0, 1);
8330 sgn0 = tree_int_cst_sgn (cst0);
8332 /* Overflowed constants and zero will cause problems. */
8333 if (integer_zerop (cst0)
8334 || TREE_OVERFLOW (cst0))
8337 /* See if we can reduce the magnitude of the constant in
8338 arg0 by changing the comparison code. */
8339 if (code0 == INTEGER_CST)
8341 /* CST <= arg1 -> CST-1 < arg1. */
8342 if (code == LE_EXPR && sgn0 == 1)
8344 /* -CST < arg1 -> -CST-1 <= arg1. */
8345 else if (code == LT_EXPR && sgn0 == -1)
8347 /* CST > arg1 -> CST-1 >= arg1. */
8348 else if (code == GT_EXPR && sgn0 == 1)
8350 /* -CST >= arg1 -> -CST-1 > arg1. */
8351 else if (code == GE_EXPR && sgn0 == -1)
8355 /* arg1 code' CST' might be more canonical. */
8360 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8362 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8364 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8365 else if (code == GT_EXPR
8366 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8368 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8369 else if (code == LE_EXPR
8370 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8372 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8373 else if (code == GE_EXPR
8374 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8378 *strict_overflow_p = true;
8381 /* Now build the constant reduced in magnitude. But not if that
8382 would produce one outside of its types range. */
8383 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8385 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8386 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8388 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8389 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8390 /* We cannot swap the comparison here as that would cause us to
8391 endlessly recurse. */
8394 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8395 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8396 if (code0 != INTEGER_CST)
8397 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8399 /* If swapping might yield to a more canonical form, do so. */
8401 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8403 return fold_build2_loc (loc, code, type, t, arg1);
8406 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8407 overflow further. Try to decrease the magnitude of constants involved
8408 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8409 and put sole constants at the second argument position.
8410 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8413 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8414 tree arg0, tree arg1)
8417 bool strict_overflow_p;
8418 const char * const warnmsg = G_("assuming signed overflow does not occur "
8419 "when reducing constant in comparison");
8421 /* Try canonicalization by simplifying arg0. */
8422 strict_overflow_p = false;
8423 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8424 &strict_overflow_p);
8427 if (strict_overflow_p)
8428 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8432 /* Try canonicalization by simplifying arg1 using the swapped
8434 code = swap_tree_comparison (code);
8435 strict_overflow_p = false;
8436 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8437 &strict_overflow_p);
8438 if (t && strict_overflow_p)
8439 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8443 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8444 space. This is used to avoid issuing overflow warnings for
8445 expressions like &p->x which can not wrap. */
8448 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8450 unsigned HOST_WIDE_INT offset_low, total_low;
8451 HOST_WIDE_INT size, offset_high, total_high;
8453 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8459 if (offset == NULL_TREE)
8464 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8468 offset_low = TREE_INT_CST_LOW (offset);
8469 offset_high = TREE_INT_CST_HIGH (offset);
8472 if (add_double_with_sign (offset_low, offset_high,
8473 bitpos / BITS_PER_UNIT, 0,
8474 &total_low, &total_high,
8478 if (total_high != 0)
8481 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8485 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8487 if (TREE_CODE (base) == ADDR_EXPR)
8489 HOST_WIDE_INT base_size;
8491 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8492 if (base_size > 0 && size < base_size)
8496 return total_low > (unsigned HOST_WIDE_INT) size;
8499 /* Subroutine of fold_binary. This routine performs all of the
8500 transformations that are common to the equality/inequality
8501 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8502 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8503 fold_binary should call fold_binary. Fold a comparison with
8504 tree code CODE and type TYPE with operands OP0 and OP1. Return
8505 the folded comparison or NULL_TREE. */
8508 fold_comparison (location_t loc, enum tree_code code, tree type,
8511 tree arg0, arg1, tem;
8516 STRIP_SIGN_NOPS (arg0);
8517 STRIP_SIGN_NOPS (arg1);
8519 tem = fold_relational_const (code, type, arg0, arg1);
8520 if (tem != NULL_TREE)
8523 /* If one arg is a real or integer constant, put it last. */
8524 if (tree_swap_operands_p (arg0, arg1, true))
8525 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8527 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8528 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8529 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8530 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8531 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8532 && (TREE_CODE (arg1) == INTEGER_CST
8533 && !TREE_OVERFLOW (arg1)))
8535 tree const1 = TREE_OPERAND (arg0, 1);
8537 tree variable = TREE_OPERAND (arg0, 0);
8540 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8542 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8543 TREE_TYPE (arg1), const2, const1);
8545 /* If the constant operation overflowed this can be
8546 simplified as a comparison against INT_MAX/INT_MIN. */
8547 if (TREE_CODE (lhs) == INTEGER_CST
8548 && TREE_OVERFLOW (lhs))
8550 int const1_sgn = tree_int_cst_sgn (const1);
8551 enum tree_code code2 = code;
8553 /* Get the sign of the constant on the lhs if the
8554 operation were VARIABLE + CONST1. */
8555 if (TREE_CODE (arg0) == MINUS_EXPR)
8556 const1_sgn = -const1_sgn;
8558 /* The sign of the constant determines if we overflowed
8559 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8560 Canonicalize to the INT_MIN overflow by swapping the comparison
8562 if (const1_sgn == -1)
8563 code2 = swap_tree_comparison (code);
8565 /* We now can look at the canonicalized case
8566 VARIABLE + 1 CODE2 INT_MIN
8567 and decide on the result. */
8568 if (code2 == LT_EXPR
8570 || code2 == EQ_EXPR)
8571 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8572 else if (code2 == NE_EXPR
8574 || code2 == GT_EXPR)
8575 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8578 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8579 && (TREE_CODE (lhs) != INTEGER_CST
8580 || !TREE_OVERFLOW (lhs)))
8582 if (code != EQ_EXPR && code != NE_EXPR)
8583 fold_overflow_warning ("assuming signed overflow does not occur "
8584 "when changing X +- C1 cmp C2 to "
8586 WARN_STRICT_OVERFLOW_COMPARISON);
8587 return fold_build2_loc (loc, code, type, variable, lhs);
8591 /* For comparisons of pointers we can decompose it to a compile time
8592 comparison of the base objects and the offsets into the object.
8593 This requires at least one operand being an ADDR_EXPR or a
8594 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8595 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8596 && (TREE_CODE (arg0) == ADDR_EXPR
8597 || TREE_CODE (arg1) == ADDR_EXPR
8598 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8599 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8601 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8602 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8603 enum machine_mode mode;
8604 int volatilep, unsignedp;
8605 bool indirect_base0 = false, indirect_base1 = false;
8607 /* Get base and offset for the access. Strip ADDR_EXPR for
8608 get_inner_reference, but put it back by stripping INDIRECT_REF
8609 off the base object if possible. indirect_baseN will be true
8610 if baseN is not an address but refers to the object itself. */
8612 if (TREE_CODE (arg0) == ADDR_EXPR)
8614 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8615 &bitsize, &bitpos0, &offset0, &mode,
8616 &unsignedp, &volatilep, false);
8617 if (TREE_CODE (base0) == INDIRECT_REF)
8618 base0 = TREE_OPERAND (base0, 0);
8620 indirect_base0 = true;
8622 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8624 base0 = TREE_OPERAND (arg0, 0);
8625 STRIP_SIGN_NOPS (base0);
8626 if (TREE_CODE (base0) == ADDR_EXPR)
8628 base0 = TREE_OPERAND (base0, 0);
8629 indirect_base0 = true;
8631 offset0 = TREE_OPERAND (arg0, 1);
8635 if (TREE_CODE (arg1) == ADDR_EXPR)
8637 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8638 &bitsize, &bitpos1, &offset1, &mode,
8639 &unsignedp, &volatilep, false);
8640 if (TREE_CODE (base1) == INDIRECT_REF)
8641 base1 = TREE_OPERAND (base1, 0);
8643 indirect_base1 = true;
8645 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8647 base1 = TREE_OPERAND (arg1, 0);
8648 STRIP_SIGN_NOPS (base1);
8649 if (TREE_CODE (base1) == ADDR_EXPR)
8651 base1 = TREE_OPERAND (base1, 0);
8652 indirect_base1 = true;
8654 offset1 = TREE_OPERAND (arg1, 1);
8657 /* A local variable can never be pointed to by
8658 the default SSA name of an incoming parameter. */
8659 if ((TREE_CODE (arg0) == ADDR_EXPR
8661 && TREE_CODE (base0) == VAR_DECL
8662 && auto_var_in_fn_p (base0, current_function_decl)
8664 && TREE_CODE (base1) == SSA_NAME
8665 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8666 && SSA_NAME_IS_DEFAULT_DEF (base1))
8667 || (TREE_CODE (arg1) == ADDR_EXPR
8669 && TREE_CODE (base1) == VAR_DECL
8670 && auto_var_in_fn_p (base1, current_function_decl)
8672 && TREE_CODE (base0) == SSA_NAME
8673 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8674 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8676 if (code == NE_EXPR)
8677 return constant_boolean_node (1, type);
8678 else if (code == EQ_EXPR)
8679 return constant_boolean_node (0, type);
8681 /* If we have equivalent bases we might be able to simplify. */
8682 else if (indirect_base0 == indirect_base1
8683 && operand_equal_p (base0, base1, 0))
8685 /* We can fold this expression to a constant if the non-constant
8686 offset parts are equal. */
8687 if ((offset0 == offset1
8688 || (offset0 && offset1
8689 && operand_equal_p (offset0, offset1, 0)))
8692 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8697 && bitpos0 != bitpos1
8698 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8699 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8700 fold_overflow_warning (("assuming pointer wraparound does not "
8701 "occur when comparing P +- C1 with "
8703 WARN_STRICT_OVERFLOW_CONDITIONAL);
8708 return constant_boolean_node (bitpos0 == bitpos1, type);
8710 return constant_boolean_node (bitpos0 != bitpos1, type);
8712 return constant_boolean_node (bitpos0 < bitpos1, type);
8714 return constant_boolean_node (bitpos0 <= bitpos1, type);
8716 return constant_boolean_node (bitpos0 >= bitpos1, type);
8718 return constant_boolean_node (bitpos0 > bitpos1, type);
8722 /* We can simplify the comparison to a comparison of the variable
8723 offset parts if the constant offset parts are equal.
8724 Be careful to use signed size type here because otherwise we
8725 mess with array offsets in the wrong way. This is possible
8726 because pointer arithmetic is restricted to retain within an
8727 object and overflow on pointer differences is undefined as of
8728 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8729 else if (bitpos0 == bitpos1
8730 && ((code == EQ_EXPR || code == NE_EXPR)
8731 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8733 /* By converting to signed size type we cover middle-end pointer
8734 arithmetic which operates on unsigned pointer types of size
8735 type size and ARRAY_REF offsets which are properly sign or
8736 zero extended from their type in case it is narrower than
8738 if (offset0 == NULL_TREE)
8739 offset0 = build_int_cst (ssizetype, 0);
8741 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8742 if (offset1 == NULL_TREE)
8743 offset1 = build_int_cst (ssizetype, 0);
8745 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8749 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8750 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8751 fold_overflow_warning (("assuming pointer wraparound does not "
8752 "occur when comparing P +- C1 with "
8754 WARN_STRICT_OVERFLOW_COMPARISON);
8756 return fold_build2_loc (loc, code, type, offset0, offset1);
8759 /* For non-equal bases we can simplify if they are addresses
8760 of local binding decls or constants. */
8761 else if (indirect_base0 && indirect_base1
8762 /* We know that !operand_equal_p (base0, base1, 0)
8763 because the if condition was false. But make
8764 sure two decls are not the same. */
8766 && TREE_CODE (arg0) == ADDR_EXPR
8767 && TREE_CODE (arg1) == ADDR_EXPR
8768 && (((TREE_CODE (base0) == VAR_DECL
8769 || TREE_CODE (base0) == PARM_DECL)
8770 && (targetm.binds_local_p (base0)
8771 || CONSTANT_CLASS_P (base1)))
8772 || CONSTANT_CLASS_P (base0))
8773 && (((TREE_CODE (base1) == VAR_DECL
8774 || TREE_CODE (base1) == PARM_DECL)
8775 && (targetm.binds_local_p (base1)
8776 || CONSTANT_CLASS_P (base0)))
8777 || CONSTANT_CLASS_P (base1)))
8779 if (code == EQ_EXPR)
8780 return omit_two_operands_loc (loc, type, boolean_false_node,
8782 else if (code == NE_EXPR)
8783 return omit_two_operands_loc (loc, type, boolean_true_node,
8786 /* For equal offsets we can simplify to a comparison of the
8788 else if (bitpos0 == bitpos1
8790 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8792 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8793 && ((offset0 == offset1)
8794 || (offset0 && offset1
8795 && operand_equal_p (offset0, offset1, 0))))
8798 base0 = build_fold_addr_expr_loc (loc, base0);
8800 base1 = build_fold_addr_expr_loc (loc, base1);
8801 return fold_build2_loc (loc, code, type, base0, base1);
8805 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8806 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8807 the resulting offset is smaller in absolute value than the
8809 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8810 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8811 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8812 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8813 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8814 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8815 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8817 tree const1 = TREE_OPERAND (arg0, 1);
8818 tree const2 = TREE_OPERAND (arg1, 1);
8819 tree variable1 = TREE_OPERAND (arg0, 0);
8820 tree variable2 = TREE_OPERAND (arg1, 0);
8822 const char * const warnmsg = G_("assuming signed overflow does not "
8823 "occur when combining constants around "
8826 /* Put the constant on the side where it doesn't overflow and is
8827 of lower absolute value than before. */
8828 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8829 ? MINUS_EXPR : PLUS_EXPR,
8831 if (!TREE_OVERFLOW (cst)
8832 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8834 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8835 return fold_build2_loc (loc, code, type,
8837 fold_build2_loc (loc,
8838 TREE_CODE (arg1), TREE_TYPE (arg1),
8842 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8843 ? MINUS_EXPR : PLUS_EXPR,
8845 if (!TREE_OVERFLOW (cst)
8846 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8848 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8849 return fold_build2_loc (loc, code, type,
8850 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8856 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8857 signed arithmetic case. That form is created by the compiler
8858 often enough for folding it to be of value. One example is in
8859 computing loop trip counts after Operator Strength Reduction. */
8860 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8861 && TREE_CODE (arg0) == MULT_EXPR
8862 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8863 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8864 && integer_zerop (arg1))
8866 tree const1 = TREE_OPERAND (arg0, 1);
8867 tree const2 = arg1; /* zero */
8868 tree variable1 = TREE_OPERAND (arg0, 0);
8869 enum tree_code cmp_code = code;
8871 /* Handle unfolded multiplication by zero. */
8872 if (integer_zerop (const1))
8873 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8875 fold_overflow_warning (("assuming signed overflow does not occur when "
8876 "eliminating multiplication in comparison "
8878 WARN_STRICT_OVERFLOW_COMPARISON);
8880 /* If const1 is negative we swap the sense of the comparison. */
8881 if (tree_int_cst_sgn (const1) < 0)
8882 cmp_code = swap_tree_comparison (cmp_code);
8884 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
8887 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
8891 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8893 tree targ0 = strip_float_extensions (arg0);
8894 tree targ1 = strip_float_extensions (arg1);
8895 tree newtype = TREE_TYPE (targ0);
8897 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8898 newtype = TREE_TYPE (targ1);
8900 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8901 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8902 return fold_build2_loc (loc, code, type,
8903 fold_convert_loc (loc, newtype, targ0),
8904 fold_convert_loc (loc, newtype, targ1));
8906 /* (-a) CMP (-b) -> b CMP a */
8907 if (TREE_CODE (arg0) == NEGATE_EXPR
8908 && TREE_CODE (arg1) == NEGATE_EXPR)
8909 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8910 TREE_OPERAND (arg0, 0));
8912 if (TREE_CODE (arg1) == REAL_CST)
8914 REAL_VALUE_TYPE cst;
8915 cst = TREE_REAL_CST (arg1);
8917 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8918 if (TREE_CODE (arg0) == NEGATE_EXPR)
8919 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8920 TREE_OPERAND (arg0, 0),
8921 build_real (TREE_TYPE (arg1),
8922 real_value_negate (&cst)));
8924 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8925 /* a CMP (-0) -> a CMP 0 */
8926 if (REAL_VALUE_MINUS_ZERO (cst))
8927 return fold_build2_loc (loc, code, type, arg0,
8928 build_real (TREE_TYPE (arg1), dconst0));
8930 /* x != NaN is always true, other ops are always false. */
8931 if (REAL_VALUE_ISNAN (cst)
8932 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8934 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8935 return omit_one_operand_loc (loc, type, tem, arg0);
8938 /* Fold comparisons against infinity. */
8939 if (REAL_VALUE_ISINF (cst)
8940 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
8942 tem = fold_inf_compare (loc, code, type, arg0, arg1);
8943 if (tem != NULL_TREE)
8948 /* If this is a comparison of a real constant with a PLUS_EXPR
8949 or a MINUS_EXPR of a real constant, we can convert it into a
8950 comparison with a revised real constant as long as no overflow
8951 occurs when unsafe_math_optimizations are enabled. */
8952 if (flag_unsafe_math_optimizations
8953 && TREE_CODE (arg1) == REAL_CST
8954 && (TREE_CODE (arg0) == PLUS_EXPR
8955 || TREE_CODE (arg0) == MINUS_EXPR)
8956 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8957 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8958 ? MINUS_EXPR : PLUS_EXPR,
8959 arg1, TREE_OPERAND (arg0, 1)))
8960 && !TREE_OVERFLOW (tem))
8961 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8963 /* Likewise, we can simplify a comparison of a real constant with
8964 a MINUS_EXPR whose first operand is also a real constant, i.e.
8965 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8966 floating-point types only if -fassociative-math is set. */
8967 if (flag_associative_math
8968 && TREE_CODE (arg1) == REAL_CST
8969 && TREE_CODE (arg0) == MINUS_EXPR
8970 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8971 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8973 && !TREE_OVERFLOW (tem))
8974 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8975 TREE_OPERAND (arg0, 1), tem);
8977 /* Fold comparisons against built-in math functions. */
8978 if (TREE_CODE (arg1) == REAL_CST
8979 && flag_unsafe_math_optimizations
8980 && ! flag_errno_math)
8982 enum built_in_function fcode = builtin_mathfn_code (arg0);
8984 if (fcode != END_BUILTINS)
8986 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
8987 if (tem != NULL_TREE)
8993 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8994 && CONVERT_EXPR_P (arg0))
8996 /* If we are widening one operand of an integer comparison,
8997 see if the other operand is similarly being widened. Perhaps we
8998 can do the comparison in the narrower type. */
8999 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9003 /* Or if we are changing signedness. */
9004 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9009 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9010 constant, we can simplify it. */
9011 if (TREE_CODE (arg1) == INTEGER_CST
9012 && (TREE_CODE (arg0) == MIN_EXPR
9013 || TREE_CODE (arg0) == MAX_EXPR)
9014 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9016 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9021 /* Simplify comparison of something with itself. (For IEEE
9022 floating-point, we can only do some of these simplifications.) */
9023 if (operand_equal_p (arg0, arg1, 0))
9028 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9029 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9030 return constant_boolean_node (1, type);
9035 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9036 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9037 return constant_boolean_node (1, type);
9038 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9041 /* For NE, we can only do this simplification if integer
9042 or we don't honor IEEE floating point NaNs. */
9043 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9044 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9046 /* ... fall through ... */
9049 return constant_boolean_node (0, type);
9055 /* If we are comparing an expression that just has comparisons
9056 of two integer values, arithmetic expressions of those comparisons,
9057 and constants, we can simplify it. There are only three cases
9058 to check: the two values can either be equal, the first can be
9059 greater, or the second can be greater. Fold the expression for
9060 those three values. Since each value must be 0 or 1, we have
9061 eight possibilities, each of which corresponds to the constant 0
9062 or 1 or one of the six possible comparisons.
9064 This handles common cases like (a > b) == 0 but also handles
9065 expressions like ((x > y) - (y > x)) > 0, which supposedly
9066 occur in macroized code. */
9068 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9070 tree cval1 = 0, cval2 = 0;
9073 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9074 /* Don't handle degenerate cases here; they should already
9075 have been handled anyway. */
9076 && cval1 != 0 && cval2 != 0
9077 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9078 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9079 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9080 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9081 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9082 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9083 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9085 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9086 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9088 /* We can't just pass T to eval_subst in case cval1 or cval2
9089 was the same as ARG1. */
9092 = fold_build2_loc (loc, code, type,
9093 eval_subst (loc, arg0, cval1, maxval,
9097 = fold_build2_loc (loc, code, type,
9098 eval_subst (loc, arg0, cval1, maxval,
9102 = fold_build2_loc (loc, code, type,
9103 eval_subst (loc, arg0, cval1, minval,
9107 /* All three of these results should be 0 or 1. Confirm they are.
9108 Then use those values to select the proper code to use. */
9110 if (TREE_CODE (high_result) == INTEGER_CST
9111 && TREE_CODE (equal_result) == INTEGER_CST
9112 && TREE_CODE (low_result) == INTEGER_CST)
9114 /* Make a 3-bit mask with the high-order bit being the
9115 value for `>', the next for '=', and the low for '<'. */
9116 switch ((integer_onep (high_result) * 4)
9117 + (integer_onep (equal_result) * 2)
9118 + integer_onep (low_result))
9122 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9143 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9148 tem = save_expr (build2 (code, type, cval1, cval2));
9149 SET_EXPR_LOCATION (tem, loc);
9152 return fold_build2_loc (loc, code, type, cval1, cval2);
9157 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9158 into a single range test. */
9159 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9160 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9161 && TREE_CODE (arg1) == INTEGER_CST
9162 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9163 && !integer_zerop (TREE_OPERAND (arg0, 1))
9164 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9165 && !TREE_OVERFLOW (arg1))
9167 tem = fold_div_compare (loc, code, type, arg0, arg1);
9168 if (tem != NULL_TREE)
9172 /* Fold ~X op ~Y as Y op X. */
9173 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9174 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9176 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9177 return fold_build2_loc (loc, code, type,
9178 fold_convert_loc (loc, cmp_type,
9179 TREE_OPERAND (arg1, 0)),
9180 TREE_OPERAND (arg0, 0));
9183 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9184 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9185 && TREE_CODE (arg1) == INTEGER_CST)
9187 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9188 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9189 TREE_OPERAND (arg0, 0),
9190 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9191 fold_convert_loc (loc, cmp_type, arg1)));
9198 /* Subroutine of fold_binary. Optimize complex multiplications of the
9199 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9200 argument EXPR represents the expression "z" of type TYPE. */
9203 fold_mult_zconjz (location_t loc, tree type, tree expr)
9205 tree itype = TREE_TYPE (type);
9206 tree rpart, ipart, tem;
9208 if (TREE_CODE (expr) == COMPLEX_EXPR)
9210 rpart = TREE_OPERAND (expr, 0);
9211 ipart = TREE_OPERAND (expr, 1);
9213 else if (TREE_CODE (expr) == COMPLEX_CST)
9215 rpart = TREE_REALPART (expr);
9216 ipart = TREE_IMAGPART (expr);
9220 expr = save_expr (expr);
9221 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9222 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9225 rpart = save_expr (rpart);
9226 ipart = save_expr (ipart);
9227 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9228 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9229 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9230 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9231 build_zero_cst (itype));
9235 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9236 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9237 guarantees that P and N have the same least significant log2(M) bits.
9238 N is not otherwise constrained. In particular, N is not normalized to
9239 0 <= N < M as is common. In general, the precise value of P is unknown.
9240 M is chosen as large as possible such that constant N can be determined.
9242 Returns M and sets *RESIDUE to N.
9244 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9245 account. This is not always possible due to PR 35705.
9248 static unsigned HOST_WIDE_INT
9249 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9250 bool allow_func_align)
9252 enum tree_code code;
9256 code = TREE_CODE (expr);
9257 if (code == ADDR_EXPR)
9259 expr = TREE_OPERAND (expr, 0);
9260 if (handled_component_p (expr))
9262 HOST_WIDE_INT bitsize, bitpos;
9264 enum machine_mode mode;
9265 int unsignedp, volatilep;
9267 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9268 &mode, &unsignedp, &volatilep, false);
9269 *residue = bitpos / BITS_PER_UNIT;
9272 if (TREE_CODE (offset) == INTEGER_CST)
9273 *residue += TREE_INT_CST_LOW (offset);
9275 /* We don't handle more complicated offset expressions. */
9281 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9282 return DECL_ALIGN_UNIT (expr);
9284 else if (code == POINTER_PLUS_EXPR)
9287 unsigned HOST_WIDE_INT modulus;
9288 enum tree_code inner_code;
9290 op0 = TREE_OPERAND (expr, 0);
9292 modulus = get_pointer_modulus_and_residue (op0, residue,
9295 op1 = TREE_OPERAND (expr, 1);
9297 inner_code = TREE_CODE (op1);
9298 if (inner_code == INTEGER_CST)
9300 *residue += TREE_INT_CST_LOW (op1);
9303 else if (inner_code == MULT_EXPR)
9305 op1 = TREE_OPERAND (op1, 1);
9306 if (TREE_CODE (op1) == INTEGER_CST)
9308 unsigned HOST_WIDE_INT align;
9310 /* Compute the greatest power-of-2 divisor of op1. */
9311 align = TREE_INT_CST_LOW (op1);
9314 /* If align is non-zero and less than *modulus, replace
9315 *modulus with align., If align is 0, then either op1 is 0
9316 or the greatest power-of-2 divisor of op1 doesn't fit in an
9317 unsigned HOST_WIDE_INT. In either case, no additional
9318 constraint is imposed. */
9320 modulus = MIN (modulus, align);
9327 /* If we get here, we were unable to determine anything useful about the
9333 /* Fold a binary expression of code CODE and type TYPE with operands
9334 OP0 and OP1. LOC is the location of the resulting expression.
9335 Return the folded expression if folding is successful. Otherwise,
9336 return NULL_TREE. */
9339 fold_binary_loc (location_t loc,
9340 enum tree_code code, tree type, tree op0, tree op1)
9342 enum tree_code_class kind = TREE_CODE_CLASS (code);
9343 tree arg0, arg1, tem;
9344 tree t1 = NULL_TREE;
9345 bool strict_overflow_p;
9347 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9348 && TREE_CODE_LENGTH (code) == 2
9350 && op1 != NULL_TREE);
9355 /* Strip any conversions that don't change the mode. This is
9356 safe for every expression, except for a comparison expression
9357 because its signedness is derived from its operands. So, in
9358 the latter case, only strip conversions that don't change the
9359 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9362 Note that this is done as an internal manipulation within the
9363 constant folder, in order to find the simplest representation
9364 of the arguments so that their form can be studied. In any
9365 cases, the appropriate type conversions should be put back in
9366 the tree that will get out of the constant folder. */
9368 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9370 STRIP_SIGN_NOPS (arg0);
9371 STRIP_SIGN_NOPS (arg1);
9379 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9380 constant but we can't do arithmetic on them. */
9381 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9382 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9383 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9384 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9385 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9386 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9388 if (kind == tcc_binary)
9390 /* Make sure type and arg0 have the same saturating flag. */
9391 gcc_assert (TYPE_SATURATING (type)
9392 == TYPE_SATURATING (TREE_TYPE (arg0)));
9393 tem = const_binop (code, arg0, arg1);
9395 else if (kind == tcc_comparison)
9396 tem = fold_relational_const (code, type, arg0, arg1);
9400 if (tem != NULL_TREE)
9402 if (TREE_TYPE (tem) != type)
9403 tem = fold_convert_loc (loc, type, tem);
9408 /* If this is a commutative operation, and ARG0 is a constant, move it
9409 to ARG1 to reduce the number of tests below. */
9410 if (commutative_tree_code (code)
9411 && tree_swap_operands_p (arg0, arg1, true))
9412 return fold_build2_loc (loc, code, type, op1, op0);
9414 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9416 First check for cases where an arithmetic operation is applied to a
9417 compound, conditional, or comparison operation. Push the arithmetic
9418 operation inside the compound or conditional to see if any folding
9419 can then be done. Convert comparison to conditional for this purpose.
9420 The also optimizes non-constant cases that used to be done in
9423 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9424 one of the operands is a comparison and the other is a comparison, a
9425 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9426 code below would make the expression more complex. Change it to a
9427 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9428 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9430 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9431 || code == EQ_EXPR || code == NE_EXPR)
9432 && ((truth_value_p (TREE_CODE (arg0))
9433 && (truth_value_p (TREE_CODE (arg1))
9434 || (TREE_CODE (arg1) == BIT_AND_EXPR
9435 && integer_onep (TREE_OPERAND (arg1, 1)))))
9436 || (truth_value_p (TREE_CODE (arg1))
9437 && (truth_value_p (TREE_CODE (arg0))
9438 || (TREE_CODE (arg0) == BIT_AND_EXPR
9439 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9441 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9442 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9445 fold_convert_loc (loc, boolean_type_node, arg0),
9446 fold_convert_loc (loc, boolean_type_node, arg1));
9448 if (code == EQ_EXPR)
9449 tem = invert_truthvalue_loc (loc, tem);
9451 return fold_convert_loc (loc, type, tem);
9454 if (TREE_CODE_CLASS (code) == tcc_binary
9455 || TREE_CODE_CLASS (code) == tcc_comparison)
9457 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9459 tem = fold_build2_loc (loc, code, type,
9460 fold_convert_loc (loc, TREE_TYPE (op0),
9461 TREE_OPERAND (arg0, 1)), op1);
9462 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9465 if (TREE_CODE (arg1) == COMPOUND_EXPR
9466 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9468 tem = fold_build2_loc (loc, code, type, op0,
9469 fold_convert_loc (loc, TREE_TYPE (op1),
9470 TREE_OPERAND (arg1, 1)));
9471 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9475 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9477 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9479 /*cond_first_p=*/1);
9480 if (tem != NULL_TREE)
9484 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9486 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9488 /*cond_first_p=*/0);
9489 if (tem != NULL_TREE)
9497 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9498 if (TREE_CODE (arg0) == ADDR_EXPR
9499 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9501 tree iref = TREE_OPERAND (arg0, 0);
9502 return fold_build2 (MEM_REF, type,
9503 TREE_OPERAND (iref, 0),
9504 int_const_binop (PLUS_EXPR, arg1,
9505 TREE_OPERAND (iref, 1), 0));
9508 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9509 if (TREE_CODE (arg0) == ADDR_EXPR
9510 && handled_component_p (TREE_OPERAND (arg0, 0)))
9513 HOST_WIDE_INT coffset;
9514 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9518 return fold_build2 (MEM_REF, type,
9519 build_fold_addr_expr (base),
9520 int_const_binop (PLUS_EXPR, arg1,
9521 size_int (coffset), 0));
9526 case POINTER_PLUS_EXPR:
9527 /* 0 +p index -> (type)index */
9528 if (integer_zerop (arg0))
9529 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9531 /* PTR +p 0 -> PTR */
9532 if (integer_zerop (arg1))
9533 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9535 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9536 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9537 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9538 return fold_convert_loc (loc, type,
9539 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9540 fold_convert_loc (loc, sizetype,
9542 fold_convert_loc (loc, sizetype,
9545 /* index +p PTR -> PTR +p index */
9546 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9547 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9548 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9549 fold_convert_loc (loc, type, arg1),
9550 fold_convert_loc (loc, sizetype, arg0));
9552 /* (PTR +p B) +p A -> PTR +p (B + A) */
9553 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9556 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9557 tree arg00 = TREE_OPERAND (arg0, 0);
9558 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9559 arg01, fold_convert_loc (loc, sizetype, arg1));
9560 return fold_convert_loc (loc, type,
9561 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9566 /* PTR_CST +p CST -> CST1 */
9567 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9568 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9569 fold_convert_loc (loc, type, arg1));
9571 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9572 of the array. Loop optimizer sometimes produce this type of
9574 if (TREE_CODE (arg0) == ADDR_EXPR)
9576 tem = try_move_mult_to_index (loc, arg0,
9577 fold_convert_loc (loc, sizetype, arg1));
9579 return fold_convert_loc (loc, type, tem);
9585 /* A + (-B) -> A - B */
9586 if (TREE_CODE (arg1) == NEGATE_EXPR)
9587 return fold_build2_loc (loc, MINUS_EXPR, type,
9588 fold_convert_loc (loc, type, arg0),
9589 fold_convert_loc (loc, type,
9590 TREE_OPERAND (arg1, 0)));
9591 /* (-A) + B -> B - A */
9592 if (TREE_CODE (arg0) == NEGATE_EXPR
9593 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9594 return fold_build2_loc (loc, MINUS_EXPR, type,
9595 fold_convert_loc (loc, type, arg1),
9596 fold_convert_loc (loc, type,
9597 TREE_OPERAND (arg0, 0)));
9599 if (INTEGRAL_TYPE_P (type))
9601 /* Convert ~A + 1 to -A. */
9602 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9603 && integer_onep (arg1))
9604 return fold_build1_loc (loc, NEGATE_EXPR, type,
9605 fold_convert_loc (loc, type,
9606 TREE_OPERAND (arg0, 0)));
9609 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9610 && !TYPE_OVERFLOW_TRAPS (type))
9612 tree tem = TREE_OPERAND (arg0, 0);
9615 if (operand_equal_p (tem, arg1, 0))
9617 t1 = build_int_cst_type (type, -1);
9618 return omit_one_operand_loc (loc, type, t1, arg1);
9623 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9624 && !TYPE_OVERFLOW_TRAPS (type))
9626 tree tem = TREE_OPERAND (arg1, 0);
9629 if (operand_equal_p (arg0, tem, 0))
9631 t1 = build_int_cst_type (type, -1);
9632 return omit_one_operand_loc (loc, type, t1, arg0);
9636 /* X + (X / CST) * -CST is X % CST. */
9637 if (TREE_CODE (arg1) == MULT_EXPR
9638 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9639 && operand_equal_p (arg0,
9640 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9642 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9643 tree cst1 = TREE_OPERAND (arg1, 1);
9644 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9646 if (sum && integer_zerop (sum))
9647 return fold_convert_loc (loc, type,
9648 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9649 TREE_TYPE (arg0), arg0,
9654 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9655 same or one. Make sure type is not saturating.
9656 fold_plusminus_mult_expr will re-associate. */
9657 if ((TREE_CODE (arg0) == MULT_EXPR
9658 || TREE_CODE (arg1) == MULT_EXPR)
9659 && !TYPE_SATURATING (type)
9660 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9662 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9667 if (! FLOAT_TYPE_P (type))
9669 if (integer_zerop (arg1))
9670 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9672 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9673 with a constant, and the two constants have no bits in common,
9674 we should treat this as a BIT_IOR_EXPR since this may produce more
9676 if (TREE_CODE (arg0) == BIT_AND_EXPR
9677 && TREE_CODE (arg1) == BIT_AND_EXPR
9678 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9679 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9680 && integer_zerop (const_binop (BIT_AND_EXPR,
9681 TREE_OPERAND (arg0, 1),
9682 TREE_OPERAND (arg1, 1))))
9684 code = BIT_IOR_EXPR;
9688 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9689 (plus (plus (mult) (mult)) (foo)) so that we can
9690 take advantage of the factoring cases below. */
9691 if (((TREE_CODE (arg0) == PLUS_EXPR
9692 || TREE_CODE (arg0) == MINUS_EXPR)
9693 && TREE_CODE (arg1) == MULT_EXPR)
9694 || ((TREE_CODE (arg1) == PLUS_EXPR
9695 || TREE_CODE (arg1) == MINUS_EXPR)
9696 && TREE_CODE (arg0) == MULT_EXPR))
9698 tree parg0, parg1, parg, marg;
9699 enum tree_code pcode;
9701 if (TREE_CODE (arg1) == MULT_EXPR)
9702 parg = arg0, marg = arg1;
9704 parg = arg1, marg = arg0;
9705 pcode = TREE_CODE (parg);
9706 parg0 = TREE_OPERAND (parg, 0);
9707 parg1 = TREE_OPERAND (parg, 1);
9711 if (TREE_CODE (parg0) == MULT_EXPR
9712 && TREE_CODE (parg1) != MULT_EXPR)
9713 return fold_build2_loc (loc, pcode, type,
9714 fold_build2_loc (loc, PLUS_EXPR, type,
9715 fold_convert_loc (loc, type,
9717 fold_convert_loc (loc, type,
9719 fold_convert_loc (loc, type, parg1));
9720 if (TREE_CODE (parg0) != MULT_EXPR
9721 && TREE_CODE (parg1) == MULT_EXPR)
9723 fold_build2_loc (loc, PLUS_EXPR, type,
9724 fold_convert_loc (loc, type, parg0),
9725 fold_build2_loc (loc, pcode, type,
9726 fold_convert_loc (loc, type, marg),
9727 fold_convert_loc (loc, type,
9733 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9734 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9735 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9737 /* Likewise if the operands are reversed. */
9738 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9739 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9741 /* Convert X + -C into X - C. */
9742 if (TREE_CODE (arg1) == REAL_CST
9743 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9745 tem = fold_negate_const (arg1, type);
9746 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9747 return fold_build2_loc (loc, MINUS_EXPR, type,
9748 fold_convert_loc (loc, type, arg0),
9749 fold_convert_loc (loc, type, tem));
9752 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9753 to __complex__ ( x, y ). This is not the same for SNaNs or
9754 if signed zeros are involved. */
9755 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9756 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9757 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9759 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9760 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9761 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9762 bool arg0rz = false, arg0iz = false;
9763 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9764 || (arg0i && (arg0iz = real_zerop (arg0i))))
9766 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9767 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9768 if (arg0rz && arg1i && real_zerop (arg1i))
9770 tree rp = arg1r ? arg1r
9771 : build1 (REALPART_EXPR, rtype, arg1);
9772 tree ip = arg0i ? arg0i
9773 : build1 (IMAGPART_EXPR, rtype, arg0);
9774 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9776 else if (arg0iz && arg1r && real_zerop (arg1r))
9778 tree rp = arg0r ? arg0r
9779 : build1 (REALPART_EXPR, rtype, arg0);
9780 tree ip = arg1i ? arg1i
9781 : build1 (IMAGPART_EXPR, rtype, arg1);
9782 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9787 if (flag_unsafe_math_optimizations
9788 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9789 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9790 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9793 /* Convert x+x into x*2.0. */
9794 if (operand_equal_p (arg0, arg1, 0)
9795 && SCALAR_FLOAT_TYPE_P (type))
9796 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9797 build_real (type, dconst2));
9799 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9800 We associate floats only if the user has specified
9801 -fassociative-math. */
9802 if (flag_associative_math
9803 && TREE_CODE (arg1) == PLUS_EXPR
9804 && TREE_CODE (arg0) != MULT_EXPR)
9806 tree tree10 = TREE_OPERAND (arg1, 0);
9807 tree tree11 = TREE_OPERAND (arg1, 1);
9808 if (TREE_CODE (tree11) == MULT_EXPR
9809 && TREE_CODE (tree10) == MULT_EXPR)
9812 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9813 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9816 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9817 We associate floats only if the user has specified
9818 -fassociative-math. */
9819 if (flag_associative_math
9820 && TREE_CODE (arg0) == PLUS_EXPR
9821 && TREE_CODE (arg1) != MULT_EXPR)
9823 tree tree00 = TREE_OPERAND (arg0, 0);
9824 tree tree01 = TREE_OPERAND (arg0, 1);
9825 if (TREE_CODE (tree01) == MULT_EXPR
9826 && TREE_CODE (tree00) == MULT_EXPR)
9829 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9830 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9836 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9837 is a rotate of A by C1 bits. */
9838 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9839 is a rotate of A by B bits. */
9841 enum tree_code code0, code1;
9843 code0 = TREE_CODE (arg0);
9844 code1 = TREE_CODE (arg1);
9845 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9846 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9847 && operand_equal_p (TREE_OPERAND (arg0, 0),
9848 TREE_OPERAND (arg1, 0), 0)
9849 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9850 TYPE_UNSIGNED (rtype))
9851 /* Only create rotates in complete modes. Other cases are not
9852 expanded properly. */
9853 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9855 tree tree01, tree11;
9856 enum tree_code code01, code11;
9858 tree01 = TREE_OPERAND (arg0, 1);
9859 tree11 = TREE_OPERAND (arg1, 1);
9860 STRIP_NOPS (tree01);
9861 STRIP_NOPS (tree11);
9862 code01 = TREE_CODE (tree01);
9863 code11 = TREE_CODE (tree11);
9864 if (code01 == INTEGER_CST
9865 && code11 == INTEGER_CST
9866 && TREE_INT_CST_HIGH (tree01) == 0
9867 && TREE_INT_CST_HIGH (tree11) == 0
9868 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9869 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9871 tem = build2_loc (loc, LROTATE_EXPR,
9872 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9873 TREE_OPERAND (arg0, 0),
9874 code0 == LSHIFT_EXPR ? tree01 : tree11);
9875 return fold_convert_loc (loc, type, tem);
9877 else if (code11 == MINUS_EXPR)
9879 tree tree110, tree111;
9880 tree110 = TREE_OPERAND (tree11, 0);
9881 tree111 = TREE_OPERAND (tree11, 1);
9882 STRIP_NOPS (tree110);
9883 STRIP_NOPS (tree111);
9884 if (TREE_CODE (tree110) == INTEGER_CST
9885 && 0 == compare_tree_int (tree110,
9887 (TREE_TYPE (TREE_OPERAND
9889 && operand_equal_p (tree01, tree111, 0))
9891 fold_convert_loc (loc, type,
9892 build2 ((code0 == LSHIFT_EXPR
9895 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9896 TREE_OPERAND (arg0, 0), tree01));
9898 else if (code01 == MINUS_EXPR)
9900 tree tree010, tree011;
9901 tree010 = TREE_OPERAND (tree01, 0);
9902 tree011 = TREE_OPERAND (tree01, 1);
9903 STRIP_NOPS (tree010);
9904 STRIP_NOPS (tree011);
9905 if (TREE_CODE (tree010) == INTEGER_CST
9906 && 0 == compare_tree_int (tree010,
9908 (TREE_TYPE (TREE_OPERAND
9910 && operand_equal_p (tree11, tree011, 0))
9911 return fold_convert_loc
9913 build2 ((code0 != LSHIFT_EXPR
9916 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9917 TREE_OPERAND (arg0, 0), tree11));
9923 /* In most languages, can't associate operations on floats through
9924 parentheses. Rather than remember where the parentheses were, we
9925 don't associate floats at all, unless the user has specified
9927 And, we need to make sure type is not saturating. */
9929 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9930 && !TYPE_SATURATING (type))
9932 tree var0, con0, lit0, minus_lit0;
9933 tree var1, con1, lit1, minus_lit1;
9936 /* Split both trees into variables, constants, and literals. Then
9937 associate each group together, the constants with literals,
9938 then the result with variables. This increases the chances of
9939 literals being recombined later and of generating relocatable
9940 expressions for the sum of a constant and literal. */
9941 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9942 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9943 code == MINUS_EXPR);
9945 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9946 if (code == MINUS_EXPR)
9949 /* With undefined overflow we can only associate constants with one
9950 variable, and constants whose association doesn't overflow. */
9951 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9952 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9959 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9960 tmp0 = TREE_OPERAND (tmp0, 0);
9961 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9962 tmp1 = TREE_OPERAND (tmp1, 0);
9963 /* The only case we can still associate with two variables
9964 is if they are the same, modulo negation. */
9965 if (!operand_equal_p (tmp0, tmp1, 0))
9969 if (ok && lit0 && lit1)
9971 tree tmp0 = fold_convert (type, lit0);
9972 tree tmp1 = fold_convert (type, lit1);
9974 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
9975 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
9980 /* Only do something if we found more than two objects. Otherwise,
9981 nothing has changed and we risk infinite recursion. */
9983 && (2 < ((var0 != 0) + (var1 != 0)
9984 + (con0 != 0) + (con1 != 0)
9985 + (lit0 != 0) + (lit1 != 0)
9986 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9988 var0 = associate_trees (loc, var0, var1, code, type);
9989 con0 = associate_trees (loc, con0, con1, code, type);
9990 lit0 = associate_trees (loc, lit0, lit1, code, type);
9991 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
9993 /* Preserve the MINUS_EXPR if the negative part of the literal is
9994 greater than the positive part. Otherwise, the multiplicative
9995 folding code (i.e extract_muldiv) may be fooled in case
9996 unsigned constants are subtracted, like in the following
9997 example: ((X*2 + 4) - 8U)/2. */
9998 if (minus_lit0 && lit0)
10000 if (TREE_CODE (lit0) == INTEGER_CST
10001 && TREE_CODE (minus_lit0) == INTEGER_CST
10002 && tree_int_cst_lt (lit0, minus_lit0))
10004 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10010 lit0 = associate_trees (loc, lit0, minus_lit0,
10019 fold_convert_loc (loc, type,
10020 associate_trees (loc, var0, minus_lit0,
10021 MINUS_EXPR, type));
10024 con0 = associate_trees (loc, con0, minus_lit0,
10027 fold_convert_loc (loc, type,
10028 associate_trees (loc, var0, con0,
10033 con0 = associate_trees (loc, con0, lit0, code, type);
10035 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10043 /* Pointer simplifications for subtraction, simple reassociations. */
10044 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10046 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10047 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10048 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10050 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10051 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10052 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10053 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10054 return fold_build2_loc (loc, PLUS_EXPR, type,
10055 fold_build2_loc (loc, MINUS_EXPR, type,
10057 fold_build2_loc (loc, MINUS_EXPR, type,
10060 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10061 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10063 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10064 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10065 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10066 fold_convert_loc (loc, type, arg1));
10068 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10071 /* A - (-B) -> A + B */
10072 if (TREE_CODE (arg1) == NEGATE_EXPR)
10073 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10074 fold_convert_loc (loc, type,
10075 TREE_OPERAND (arg1, 0)));
10076 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10077 if (TREE_CODE (arg0) == NEGATE_EXPR
10078 && (FLOAT_TYPE_P (type)
10079 || INTEGRAL_TYPE_P (type))
10080 && negate_expr_p (arg1)
10081 && reorder_operands_p (arg0, arg1))
10082 return fold_build2_loc (loc, MINUS_EXPR, type,
10083 fold_convert_loc (loc, type,
10084 negate_expr (arg1)),
10085 fold_convert_loc (loc, type,
10086 TREE_OPERAND (arg0, 0)));
10087 /* Convert -A - 1 to ~A. */
10088 if (INTEGRAL_TYPE_P (type)
10089 && TREE_CODE (arg0) == NEGATE_EXPR
10090 && integer_onep (arg1)
10091 && !TYPE_OVERFLOW_TRAPS (type))
10092 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10093 fold_convert_loc (loc, type,
10094 TREE_OPERAND (arg0, 0)));
10096 /* Convert -1 - A to ~A. */
10097 if (INTEGRAL_TYPE_P (type)
10098 && integer_all_onesp (arg0))
10099 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10102 /* X - (X / CST) * CST is X % CST. */
10103 if (INTEGRAL_TYPE_P (type)
10104 && TREE_CODE (arg1) == MULT_EXPR
10105 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10106 && operand_equal_p (arg0,
10107 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10108 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10109 TREE_OPERAND (arg1, 1), 0))
10111 fold_convert_loc (loc, type,
10112 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10113 arg0, TREE_OPERAND (arg1, 1)));
10115 if (! FLOAT_TYPE_P (type))
10117 if (integer_zerop (arg0))
10118 return negate_expr (fold_convert_loc (loc, type, arg1));
10119 if (integer_zerop (arg1))
10120 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10122 /* Fold A - (A & B) into ~B & A. */
10123 if (!TREE_SIDE_EFFECTS (arg0)
10124 && TREE_CODE (arg1) == BIT_AND_EXPR)
10126 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10128 tree arg10 = fold_convert_loc (loc, type,
10129 TREE_OPERAND (arg1, 0));
10130 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10131 fold_build1_loc (loc, BIT_NOT_EXPR,
10133 fold_convert_loc (loc, type, arg0));
10135 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10137 tree arg11 = fold_convert_loc (loc,
10138 type, TREE_OPERAND (arg1, 1));
10139 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10140 fold_build1_loc (loc, BIT_NOT_EXPR,
10142 fold_convert_loc (loc, type, arg0));
10146 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10147 any power of 2 minus 1. */
10148 if (TREE_CODE (arg0) == BIT_AND_EXPR
10149 && TREE_CODE (arg1) == BIT_AND_EXPR
10150 && operand_equal_p (TREE_OPERAND (arg0, 0),
10151 TREE_OPERAND (arg1, 0), 0))
10153 tree mask0 = TREE_OPERAND (arg0, 1);
10154 tree mask1 = TREE_OPERAND (arg1, 1);
10155 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10157 if (operand_equal_p (tem, mask1, 0))
10159 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10160 TREE_OPERAND (arg0, 0), mask1);
10161 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10166 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10167 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10168 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10170 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10171 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10172 (-ARG1 + ARG0) reduces to -ARG1. */
10173 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10174 return negate_expr (fold_convert_loc (loc, type, arg1));
10176 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10177 __complex__ ( x, -y ). This is not the same for SNaNs or if
10178 signed zeros are involved. */
10179 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10180 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10181 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10183 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10184 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10185 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10186 bool arg0rz = false, arg0iz = false;
10187 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10188 || (arg0i && (arg0iz = real_zerop (arg0i))))
10190 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10191 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10192 if (arg0rz && arg1i && real_zerop (arg1i))
10194 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10196 : build1 (REALPART_EXPR, rtype, arg1));
10197 tree ip = arg0i ? arg0i
10198 : build1 (IMAGPART_EXPR, rtype, arg0);
10199 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10201 else if (arg0iz && arg1r && real_zerop (arg1r))
10203 tree rp = arg0r ? arg0r
10204 : build1 (REALPART_EXPR, rtype, arg0);
10205 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10207 : build1 (IMAGPART_EXPR, rtype, arg1));
10208 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10213 /* Fold &x - &x. This can happen from &x.foo - &x.
10214 This is unsafe for certain floats even in non-IEEE formats.
10215 In IEEE, it is unsafe because it does wrong for NaNs.
10216 Also note that operand_equal_p is always false if an operand
10219 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10220 && operand_equal_p (arg0, arg1, 0))
10221 return build_zero_cst (type);
10223 /* A - B -> A + (-B) if B is easily negatable. */
10224 if (negate_expr_p (arg1)
10225 && ((FLOAT_TYPE_P (type)
10226 /* Avoid this transformation if B is a positive REAL_CST. */
10227 && (TREE_CODE (arg1) != REAL_CST
10228 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10229 || INTEGRAL_TYPE_P (type)))
10230 return fold_build2_loc (loc, PLUS_EXPR, type,
10231 fold_convert_loc (loc, type, arg0),
10232 fold_convert_loc (loc, type,
10233 negate_expr (arg1)));
10235 /* Try folding difference of addresses. */
10237 HOST_WIDE_INT diff;
10239 if ((TREE_CODE (arg0) == ADDR_EXPR
10240 || TREE_CODE (arg1) == ADDR_EXPR)
10241 && ptr_difference_const (arg0, arg1, &diff))
10242 return build_int_cst_type (type, diff);
10245 /* Fold &a[i] - &a[j] to i-j. */
10246 if (TREE_CODE (arg0) == ADDR_EXPR
10247 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10248 && TREE_CODE (arg1) == ADDR_EXPR
10249 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10251 tree aref0 = TREE_OPERAND (arg0, 0);
10252 tree aref1 = TREE_OPERAND (arg1, 0);
10253 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10254 TREE_OPERAND (aref1, 0), 0))
10256 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10257 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10258 tree esz = array_ref_element_size (aref0);
10259 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10260 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10261 fold_convert_loc (loc, type, esz));
10266 if (FLOAT_TYPE_P (type)
10267 && flag_unsafe_math_optimizations
10268 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10269 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10270 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10273 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10274 same or one. Make sure type is not saturating.
10275 fold_plusminus_mult_expr will re-associate. */
10276 if ((TREE_CODE (arg0) == MULT_EXPR
10277 || TREE_CODE (arg1) == MULT_EXPR)
10278 && !TYPE_SATURATING (type)
10279 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10281 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10289 /* (-A) * (-B) -> A * B */
10290 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10291 return fold_build2_loc (loc, MULT_EXPR, type,
10292 fold_convert_loc (loc, type,
10293 TREE_OPERAND (arg0, 0)),
10294 fold_convert_loc (loc, type,
10295 negate_expr (arg1)));
10296 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10297 return fold_build2_loc (loc, MULT_EXPR, type,
10298 fold_convert_loc (loc, type,
10299 negate_expr (arg0)),
10300 fold_convert_loc (loc, type,
10301 TREE_OPERAND (arg1, 0)));
10303 if (! FLOAT_TYPE_P (type))
10305 if (integer_zerop (arg1))
10306 return omit_one_operand_loc (loc, type, arg1, arg0);
10307 if (integer_onep (arg1))
10308 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10309 /* Transform x * -1 into -x. Make sure to do the negation
10310 on the original operand with conversions not stripped
10311 because we can only strip non-sign-changing conversions. */
10312 if (integer_all_onesp (arg1))
10313 return fold_convert_loc (loc, type, negate_expr (op0));
10314 /* Transform x * -C into -x * C if x is easily negatable. */
10315 if (TREE_CODE (arg1) == INTEGER_CST
10316 && tree_int_cst_sgn (arg1) == -1
10317 && negate_expr_p (arg0)
10318 && (tem = negate_expr (arg1)) != arg1
10319 && !TREE_OVERFLOW (tem))
10320 return fold_build2_loc (loc, MULT_EXPR, type,
10321 fold_convert_loc (loc, type,
10322 negate_expr (arg0)),
10325 /* (a * (1 << b)) is (a << b) */
10326 if (TREE_CODE (arg1) == LSHIFT_EXPR
10327 && integer_onep (TREE_OPERAND (arg1, 0)))
10328 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10329 TREE_OPERAND (arg1, 1));
10330 if (TREE_CODE (arg0) == LSHIFT_EXPR
10331 && integer_onep (TREE_OPERAND (arg0, 0)))
10332 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10333 TREE_OPERAND (arg0, 1));
10335 /* (A + A) * C -> A * 2 * C */
10336 if (TREE_CODE (arg0) == PLUS_EXPR
10337 && TREE_CODE (arg1) == INTEGER_CST
10338 && operand_equal_p (TREE_OPERAND (arg0, 0),
10339 TREE_OPERAND (arg0, 1), 0))
10340 return fold_build2_loc (loc, MULT_EXPR, type,
10341 omit_one_operand_loc (loc, type,
10342 TREE_OPERAND (arg0, 0),
10343 TREE_OPERAND (arg0, 1)),
10344 fold_build2_loc (loc, MULT_EXPR, type,
10345 build_int_cst (type, 2) , arg1));
10347 strict_overflow_p = false;
10348 if (TREE_CODE (arg1) == INTEGER_CST
10349 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10350 &strict_overflow_p)))
10352 if (strict_overflow_p)
10353 fold_overflow_warning (("assuming signed overflow does not "
10354 "occur when simplifying "
10356 WARN_STRICT_OVERFLOW_MISC);
10357 return fold_convert_loc (loc, type, tem);
10360 /* Optimize z * conj(z) for integer complex numbers. */
10361 if (TREE_CODE (arg0) == CONJ_EXPR
10362 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10363 return fold_mult_zconjz (loc, type, arg1);
10364 if (TREE_CODE (arg1) == CONJ_EXPR
10365 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10366 return fold_mult_zconjz (loc, type, arg0);
10370 /* Maybe fold x * 0 to 0. The expressions aren't the same
10371 when x is NaN, since x * 0 is also NaN. Nor are they the
10372 same in modes with signed zeros, since multiplying a
10373 negative value by 0 gives -0, not +0. */
10374 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10375 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10376 && real_zerop (arg1))
10377 return omit_one_operand_loc (loc, type, arg1, arg0);
10378 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10379 Likewise for complex arithmetic with signed zeros. */
10380 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10381 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10382 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10383 && real_onep (arg1))
10384 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10386 /* Transform x * -1.0 into -x. */
10387 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10388 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10389 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10390 && real_minus_onep (arg1))
10391 return fold_convert_loc (loc, type, negate_expr (arg0));
10393 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10394 the result for floating point types due to rounding so it is applied
10395 only if -fassociative-math was specify. */
10396 if (flag_associative_math
10397 && TREE_CODE (arg0) == RDIV_EXPR
10398 && TREE_CODE (arg1) == REAL_CST
10399 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10401 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10404 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10405 TREE_OPERAND (arg0, 1));
10408 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10409 if (operand_equal_p (arg0, arg1, 0))
10411 tree tem = fold_strip_sign_ops (arg0);
10412 if (tem != NULL_TREE)
10414 tem = fold_convert_loc (loc, type, tem);
10415 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10419 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10420 This is not the same for NaNs or if signed zeros are
10422 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10423 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10424 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10425 && TREE_CODE (arg1) == COMPLEX_CST
10426 && real_zerop (TREE_REALPART (arg1)))
10428 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10429 if (real_onep (TREE_IMAGPART (arg1)))
10431 fold_build2_loc (loc, COMPLEX_EXPR, type,
10432 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10434 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10435 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10437 fold_build2_loc (loc, COMPLEX_EXPR, type,
10438 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10439 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10443 /* Optimize z * conj(z) for floating point complex numbers.
10444 Guarded by flag_unsafe_math_optimizations as non-finite
10445 imaginary components don't produce scalar results. */
10446 if (flag_unsafe_math_optimizations
10447 && TREE_CODE (arg0) == CONJ_EXPR
10448 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10449 return fold_mult_zconjz (loc, type, arg1);
10450 if (flag_unsafe_math_optimizations
10451 && TREE_CODE (arg1) == CONJ_EXPR
10452 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10453 return fold_mult_zconjz (loc, type, arg0);
10455 if (flag_unsafe_math_optimizations)
10457 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10458 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10460 /* Optimizations of root(...)*root(...). */
10461 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10464 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10465 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10467 /* Optimize sqrt(x)*sqrt(x) as x. */
10468 if (BUILTIN_SQRT_P (fcode0)
10469 && operand_equal_p (arg00, arg10, 0)
10470 && ! HONOR_SNANS (TYPE_MODE (type)))
10473 /* Optimize root(x)*root(y) as root(x*y). */
10474 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10475 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10476 return build_call_expr_loc (loc, rootfn, 1, arg);
10479 /* Optimize expN(x)*expN(y) as expN(x+y). */
10480 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10482 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10483 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10484 CALL_EXPR_ARG (arg0, 0),
10485 CALL_EXPR_ARG (arg1, 0));
10486 return build_call_expr_loc (loc, expfn, 1, arg);
10489 /* Optimizations of pow(...)*pow(...). */
10490 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10491 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10492 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10494 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10495 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10496 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10497 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10499 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10500 if (operand_equal_p (arg01, arg11, 0))
10502 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10503 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10505 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10508 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10509 if (operand_equal_p (arg00, arg10, 0))
10511 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10512 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10514 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10518 /* Optimize tan(x)*cos(x) as sin(x). */
10519 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10520 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10521 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10522 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10523 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10524 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10525 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10526 CALL_EXPR_ARG (arg1, 0), 0))
10528 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10530 if (sinfn != NULL_TREE)
10531 return build_call_expr_loc (loc, sinfn, 1,
10532 CALL_EXPR_ARG (arg0, 0));
10535 /* Optimize x*pow(x,c) as pow(x,c+1). */
10536 if (fcode1 == BUILT_IN_POW
10537 || fcode1 == BUILT_IN_POWF
10538 || fcode1 == BUILT_IN_POWL)
10540 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10541 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10542 if (TREE_CODE (arg11) == REAL_CST
10543 && !TREE_OVERFLOW (arg11)
10544 && operand_equal_p (arg0, arg10, 0))
10546 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10550 c = TREE_REAL_CST (arg11);
10551 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10552 arg = build_real (type, c);
10553 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10557 /* Optimize pow(x,c)*x as pow(x,c+1). */
10558 if (fcode0 == BUILT_IN_POW
10559 || fcode0 == BUILT_IN_POWF
10560 || fcode0 == BUILT_IN_POWL)
10562 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10563 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10564 if (TREE_CODE (arg01) == REAL_CST
10565 && !TREE_OVERFLOW (arg01)
10566 && operand_equal_p (arg1, arg00, 0))
10568 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10572 c = TREE_REAL_CST (arg01);
10573 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10574 arg = build_real (type, c);
10575 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10579 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10580 if (optimize_function_for_speed_p (cfun)
10581 && operand_equal_p (arg0, arg1, 0))
10583 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10587 tree arg = build_real (type, dconst2);
10588 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10597 if (integer_all_onesp (arg1))
10598 return omit_one_operand_loc (loc, type, arg1, arg0);
10599 if (integer_zerop (arg1))
10600 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10601 if (operand_equal_p (arg0, arg1, 0))
10602 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10604 /* ~X | X is -1. */
10605 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10606 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10608 t1 = build_zero_cst (type);
10609 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10610 return omit_one_operand_loc (loc, type, t1, arg1);
10613 /* X | ~X is -1. */
10614 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10615 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10617 t1 = build_zero_cst (type);
10618 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10619 return omit_one_operand_loc (loc, type, t1, arg0);
10622 /* Canonicalize (X & C1) | C2. */
10623 if (TREE_CODE (arg0) == BIT_AND_EXPR
10624 && TREE_CODE (arg1) == INTEGER_CST
10625 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10627 double_int c1, c2, c3, msk;
10628 int width = TYPE_PRECISION (type), w;
10629 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
10630 c2 = tree_to_double_int (arg1);
10632 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10633 if (double_int_equal_p (double_int_and (c1, c2), c1))
10634 return omit_one_operand_loc (loc, type, arg1,
10635 TREE_OPERAND (arg0, 0));
10637 msk = double_int_mask (width);
10639 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10640 if (double_int_zero_p (double_int_and_not (msk,
10641 double_int_ior (c1, c2))))
10642 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10643 TREE_OPERAND (arg0, 0), arg1);
10645 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10646 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10647 mode which allows further optimizations. */
10648 c1 = double_int_and (c1, msk);
10649 c2 = double_int_and (c2, msk);
10650 c3 = double_int_and_not (c1, c2);
10651 for (w = BITS_PER_UNIT;
10652 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10655 unsigned HOST_WIDE_INT mask
10656 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10657 if (((c1.low | c2.low) & mask) == mask
10658 && (c1.low & ~mask) == 0 && c1.high == 0)
10660 c3 = uhwi_to_double_int (mask);
10664 if (!double_int_equal_p (c3, c1))
10665 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10666 fold_build2_loc (loc, BIT_AND_EXPR, type,
10667 TREE_OPERAND (arg0, 0),
10668 double_int_to_tree (type,
10673 /* (X & Y) | Y is (X, Y). */
10674 if (TREE_CODE (arg0) == BIT_AND_EXPR
10675 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10676 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10677 /* (X & Y) | X is (Y, X). */
10678 if (TREE_CODE (arg0) == BIT_AND_EXPR
10679 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10680 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10681 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10682 /* X | (X & Y) is (Y, X). */
10683 if (TREE_CODE (arg1) == BIT_AND_EXPR
10684 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10685 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10686 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10687 /* X | (Y & X) is (Y, X). */
10688 if (TREE_CODE (arg1) == BIT_AND_EXPR
10689 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10690 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10691 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10693 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10694 if (t1 != NULL_TREE)
10697 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10699 This results in more efficient code for machines without a NAND
10700 instruction. Combine will canonicalize to the first form
10701 which will allow use of NAND instructions provided by the
10702 backend if they exist. */
10703 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10704 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10707 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10708 build2 (BIT_AND_EXPR, type,
10709 fold_convert_loc (loc, type,
10710 TREE_OPERAND (arg0, 0)),
10711 fold_convert_loc (loc, type,
10712 TREE_OPERAND (arg1, 0))));
10715 /* See if this can be simplified into a rotate first. If that
10716 is unsuccessful continue in the association code. */
10720 if (integer_zerop (arg1))
10721 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10722 if (integer_all_onesp (arg1))
10723 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10724 if (operand_equal_p (arg0, arg1, 0))
10725 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10727 /* ~X ^ X is -1. */
10728 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10729 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10731 t1 = build_zero_cst (type);
10732 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10733 return omit_one_operand_loc (loc, type, t1, arg1);
10736 /* X ^ ~X is -1. */
10737 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10738 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10740 t1 = build_zero_cst (type);
10741 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10742 return omit_one_operand_loc (loc, type, t1, arg0);
10745 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10746 with a constant, and the two constants have no bits in common,
10747 we should treat this as a BIT_IOR_EXPR since this may produce more
10748 simplifications. */
10749 if (TREE_CODE (arg0) == BIT_AND_EXPR
10750 && TREE_CODE (arg1) == BIT_AND_EXPR
10751 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10752 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10753 && integer_zerop (const_binop (BIT_AND_EXPR,
10754 TREE_OPERAND (arg0, 1),
10755 TREE_OPERAND (arg1, 1))))
10757 code = BIT_IOR_EXPR;
10761 /* (X | Y) ^ X -> Y & ~ X*/
10762 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10763 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10765 tree t2 = TREE_OPERAND (arg0, 1);
10766 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10768 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10769 fold_convert_loc (loc, type, t2),
10770 fold_convert_loc (loc, type, t1));
10774 /* (Y | X) ^ X -> Y & ~ X*/
10775 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10776 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10778 tree t2 = TREE_OPERAND (arg0, 0);
10779 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10781 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10782 fold_convert_loc (loc, type, t2),
10783 fold_convert_loc (loc, type, t1));
10787 /* X ^ (X | Y) -> Y & ~ X*/
10788 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10789 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10791 tree t2 = TREE_OPERAND (arg1, 1);
10792 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10794 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10795 fold_convert_loc (loc, type, t2),
10796 fold_convert_loc (loc, type, t1));
10800 /* X ^ (Y | X) -> Y & ~ X*/
10801 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10802 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10804 tree t2 = TREE_OPERAND (arg1, 0);
10805 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10807 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10808 fold_convert_loc (loc, type, t2),
10809 fold_convert_loc (loc, type, t1));
10813 /* Convert ~X ^ ~Y to X ^ Y. */
10814 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10815 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10816 return fold_build2_loc (loc, code, type,
10817 fold_convert_loc (loc, type,
10818 TREE_OPERAND (arg0, 0)),
10819 fold_convert_loc (loc, type,
10820 TREE_OPERAND (arg1, 0)));
10822 /* Convert ~X ^ C to X ^ ~C. */
10823 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10824 && TREE_CODE (arg1) == INTEGER_CST)
10825 return fold_build2_loc (loc, code, type,
10826 fold_convert_loc (loc, type,
10827 TREE_OPERAND (arg0, 0)),
10828 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10830 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10831 if (TREE_CODE (arg0) == BIT_AND_EXPR
10832 && integer_onep (TREE_OPERAND (arg0, 1))
10833 && integer_onep (arg1))
10834 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10835 build_int_cst (TREE_TYPE (arg0), 0));
10837 /* Fold (X & Y) ^ Y as ~X & Y. */
10838 if (TREE_CODE (arg0) == BIT_AND_EXPR
10839 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10841 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10842 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10843 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10844 fold_convert_loc (loc, type, arg1));
10846 /* Fold (X & Y) ^ X as ~Y & X. */
10847 if (TREE_CODE (arg0) == BIT_AND_EXPR
10848 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10849 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10851 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10852 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10853 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10854 fold_convert_loc (loc, type, arg1));
10856 /* Fold X ^ (X & Y) as X & ~Y. */
10857 if (TREE_CODE (arg1) == BIT_AND_EXPR
10858 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10860 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10861 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10862 fold_convert_loc (loc, type, arg0),
10863 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10865 /* Fold X ^ (Y & X) as ~Y & X. */
10866 if (TREE_CODE (arg1) == BIT_AND_EXPR
10867 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10868 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10870 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10871 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10872 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10873 fold_convert_loc (loc, type, arg0));
10876 /* See if this can be simplified into a rotate first. If that
10877 is unsuccessful continue in the association code. */
10881 if (integer_all_onesp (arg1))
10882 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10883 if (integer_zerop (arg1))
10884 return omit_one_operand_loc (loc, type, arg1, arg0);
10885 if (operand_equal_p (arg0, arg1, 0))
10886 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10888 /* ~X & X is always zero. */
10889 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10890 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10891 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10893 /* X & ~X is always zero. */
10894 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10895 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10896 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10898 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10899 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10900 && TREE_CODE (arg1) == INTEGER_CST
10901 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10903 tree tmp1 = fold_convert_loc (loc, type, arg1);
10904 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10905 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10906 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10907 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10909 fold_convert_loc (loc, type,
10910 fold_build2_loc (loc, BIT_IOR_EXPR,
10911 type, tmp2, tmp3));
10914 /* (X | Y) & Y is (X, Y). */
10915 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10916 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10917 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10918 /* (X | Y) & X is (Y, X). */
10919 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10920 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10921 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10922 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10923 /* X & (X | Y) is (Y, X). */
10924 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10925 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10926 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10927 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10928 /* X & (Y | X) is (Y, X). */
10929 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10930 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10931 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10932 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10934 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10935 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10936 && integer_onep (TREE_OPERAND (arg0, 1))
10937 && integer_onep (arg1))
10939 tem = TREE_OPERAND (arg0, 0);
10940 return fold_build2_loc (loc, EQ_EXPR, type,
10941 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10942 build_int_cst (TREE_TYPE (tem), 1)),
10943 build_int_cst (TREE_TYPE (tem), 0));
10945 /* Fold ~X & 1 as (X & 1) == 0. */
10946 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10947 && integer_onep (arg1))
10949 tem = TREE_OPERAND (arg0, 0);
10950 return fold_build2_loc (loc, EQ_EXPR, type,
10951 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10952 build_int_cst (TREE_TYPE (tem), 1)),
10953 build_int_cst (TREE_TYPE (tem), 0));
10956 /* Fold (X ^ Y) & Y as ~X & Y. */
10957 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10958 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10960 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10961 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10962 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10963 fold_convert_loc (loc, type, arg1));
10965 /* Fold (X ^ Y) & X as ~Y & X. */
10966 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10967 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10968 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10970 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10971 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10972 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10973 fold_convert_loc (loc, type, arg1));
10975 /* Fold X & (X ^ Y) as X & ~Y. */
10976 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10977 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10979 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10980 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10981 fold_convert_loc (loc, type, arg0),
10982 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10984 /* Fold X & (Y ^ X) as ~Y & X. */
10985 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10986 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10987 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10989 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10990 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10991 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10992 fold_convert_loc (loc, type, arg0));
10995 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10996 ((A & N) + B) & M -> (A + B) & M
10997 Similarly if (N & M) == 0,
10998 ((A | N) + B) & M -> (A + B) & M
10999 and for - instead of + (or unary - instead of +)
11000 and/or ^ instead of |.
11001 If B is constant and (B & M) == 0, fold into A & M. */
11002 if (host_integerp (arg1, 1))
11004 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11005 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11006 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11007 && (TREE_CODE (arg0) == PLUS_EXPR
11008 || TREE_CODE (arg0) == MINUS_EXPR
11009 || TREE_CODE (arg0) == NEGATE_EXPR)
11010 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11011 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11015 unsigned HOST_WIDE_INT cst0;
11017 /* Now we know that arg0 is (C + D) or (C - D) or
11018 -C and arg1 (M) is == (1LL << cst) - 1.
11019 Store C into PMOP[0] and D into PMOP[1]. */
11020 pmop[0] = TREE_OPERAND (arg0, 0);
11022 if (TREE_CODE (arg0) != NEGATE_EXPR)
11024 pmop[1] = TREE_OPERAND (arg0, 1);
11028 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11029 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11033 for (; which >= 0; which--)
11034 switch (TREE_CODE (pmop[which]))
11039 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11042 /* tree_low_cst not used, because we don't care about
11044 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11046 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11051 else if (cst0 != 0)
11053 /* If C or D is of the form (A & N) where
11054 (N & M) == M, or of the form (A | N) or
11055 (A ^ N) where (N & M) == 0, replace it with A. */
11056 pmop[which] = TREE_OPERAND (pmop[which], 0);
11059 /* If C or D is a N where (N & M) == 0, it can be
11060 omitted (assumed 0). */
11061 if ((TREE_CODE (arg0) == PLUS_EXPR
11062 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11063 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11064 pmop[which] = NULL;
11070 /* Only build anything new if we optimized one or both arguments
11072 if (pmop[0] != TREE_OPERAND (arg0, 0)
11073 || (TREE_CODE (arg0) != NEGATE_EXPR
11074 && pmop[1] != TREE_OPERAND (arg0, 1)))
11076 tree utype = TREE_TYPE (arg0);
11077 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11079 /* Perform the operations in a type that has defined
11080 overflow behavior. */
11081 utype = unsigned_type_for (TREE_TYPE (arg0));
11082 if (pmop[0] != NULL)
11083 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11084 if (pmop[1] != NULL)
11085 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11088 if (TREE_CODE (arg0) == NEGATE_EXPR)
11089 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11090 else if (TREE_CODE (arg0) == PLUS_EXPR)
11092 if (pmop[0] != NULL && pmop[1] != NULL)
11093 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11095 else if (pmop[0] != NULL)
11097 else if (pmop[1] != NULL)
11100 return build_int_cst (type, 0);
11102 else if (pmop[0] == NULL)
11103 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11105 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11107 /* TEM is now the new binary +, - or unary - replacement. */
11108 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11109 fold_convert_loc (loc, utype, arg1));
11110 return fold_convert_loc (loc, type, tem);
11115 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11116 if (t1 != NULL_TREE)
11118 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11119 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11120 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11123 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11125 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11126 && (~TREE_INT_CST_LOW (arg1)
11127 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11129 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11132 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11134 This results in more efficient code for machines without a NOR
11135 instruction. Combine will canonicalize to the first form
11136 which will allow use of NOR instructions provided by the
11137 backend if they exist. */
11138 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11139 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11141 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11142 build2 (BIT_IOR_EXPR, type,
11143 fold_convert_loc (loc, type,
11144 TREE_OPERAND (arg0, 0)),
11145 fold_convert_loc (loc, type,
11146 TREE_OPERAND (arg1, 0))));
11149 /* If arg0 is derived from the address of an object or function, we may
11150 be able to fold this expression using the object or function's
11152 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11154 unsigned HOST_WIDE_INT modulus, residue;
11155 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11157 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11158 integer_onep (arg1));
11160 /* This works because modulus is a power of 2. If this weren't the
11161 case, we'd have to replace it by its greatest power-of-2
11162 divisor: modulus & -modulus. */
11164 return build_int_cst (type, residue & low);
11167 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11168 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11169 if the new mask might be further optimized. */
11170 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11171 || TREE_CODE (arg0) == RSHIFT_EXPR)
11172 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11173 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11174 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11175 < TYPE_PRECISION (TREE_TYPE (arg0))
11176 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11177 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11179 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11180 unsigned HOST_WIDE_INT mask
11181 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11182 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11183 tree shift_type = TREE_TYPE (arg0);
11185 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11186 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11187 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11188 && TYPE_PRECISION (TREE_TYPE (arg0))
11189 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11191 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11192 tree arg00 = TREE_OPERAND (arg0, 0);
11193 /* See if more bits can be proven as zero because of
11195 if (TREE_CODE (arg00) == NOP_EXPR
11196 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11198 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11199 if (TYPE_PRECISION (inner_type)
11200 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11201 && TYPE_PRECISION (inner_type) < prec)
11203 prec = TYPE_PRECISION (inner_type);
11204 /* See if we can shorten the right shift. */
11206 shift_type = inner_type;
11209 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11210 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11211 zerobits <<= prec - shiftc;
11212 /* For arithmetic shift if sign bit could be set, zerobits
11213 can contain actually sign bits, so no transformation is
11214 possible, unless MASK masks them all away. In that
11215 case the shift needs to be converted into logical shift. */
11216 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11217 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11219 if ((mask & zerobits) == 0)
11220 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11226 /* ((X << 16) & 0xff00) is (X, 0). */
11227 if ((mask & zerobits) == mask)
11228 return omit_one_operand_loc (loc, type,
11229 build_int_cst (type, 0), arg0);
11231 newmask = mask | zerobits;
11232 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11236 /* Only do the transformation if NEWMASK is some integer
11238 for (prec = BITS_PER_UNIT;
11239 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11240 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11242 if (prec < HOST_BITS_PER_WIDE_INT
11243 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11247 if (shift_type != TREE_TYPE (arg0))
11249 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11250 fold_convert_loc (loc, shift_type,
11251 TREE_OPERAND (arg0, 0)),
11252 TREE_OPERAND (arg0, 1));
11253 tem = fold_convert_loc (loc, type, tem);
11257 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11258 if (!tree_int_cst_equal (newmaskt, arg1))
11259 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11267 /* Don't touch a floating-point divide by zero unless the mode
11268 of the constant can represent infinity. */
11269 if (TREE_CODE (arg1) == REAL_CST
11270 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11271 && real_zerop (arg1))
11274 /* Optimize A / A to 1.0 if we don't care about
11275 NaNs or Infinities. Skip the transformation
11276 for non-real operands. */
11277 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11278 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11279 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11280 && operand_equal_p (arg0, arg1, 0))
11282 tree r = build_real (TREE_TYPE (arg0), dconst1);
11284 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11287 /* The complex version of the above A / A optimization. */
11288 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11289 && operand_equal_p (arg0, arg1, 0))
11291 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11292 if (! HONOR_NANS (TYPE_MODE (elem_type))
11293 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11295 tree r = build_real (elem_type, dconst1);
11296 /* omit_two_operands will call fold_convert for us. */
11297 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11301 /* (-A) / (-B) -> A / B */
11302 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11303 return fold_build2_loc (loc, RDIV_EXPR, type,
11304 TREE_OPERAND (arg0, 0),
11305 negate_expr (arg1));
11306 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11307 return fold_build2_loc (loc, RDIV_EXPR, type,
11308 negate_expr (arg0),
11309 TREE_OPERAND (arg1, 0));
11311 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11312 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11313 && real_onep (arg1))
11314 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11316 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11317 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11318 && real_minus_onep (arg1))
11319 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11320 negate_expr (arg0)));
11322 /* If ARG1 is a constant, we can convert this to a multiply by the
11323 reciprocal. This does not have the same rounding properties,
11324 so only do this if -freciprocal-math. We can actually
11325 always safely do it if ARG1 is a power of two, but it's hard to
11326 tell if it is or not in a portable manner. */
11327 if (TREE_CODE (arg1) == REAL_CST)
11329 if (flag_reciprocal_math
11330 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11332 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11333 /* Find the reciprocal if optimizing and the result is exact. */
11337 r = TREE_REAL_CST (arg1);
11338 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11340 tem = build_real (type, r);
11341 return fold_build2_loc (loc, MULT_EXPR, type,
11342 fold_convert_loc (loc, type, arg0), tem);
11346 /* Convert A/B/C to A/(B*C). */
11347 if (flag_reciprocal_math
11348 && TREE_CODE (arg0) == RDIV_EXPR)
11349 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11350 fold_build2_loc (loc, MULT_EXPR, type,
11351 TREE_OPERAND (arg0, 1), arg1));
11353 /* Convert A/(B/C) to (A/B)*C. */
11354 if (flag_reciprocal_math
11355 && TREE_CODE (arg1) == RDIV_EXPR)
11356 return fold_build2_loc (loc, MULT_EXPR, type,
11357 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11358 TREE_OPERAND (arg1, 0)),
11359 TREE_OPERAND (arg1, 1));
11361 /* Convert C1/(X*C2) into (C1/C2)/X. */
11362 if (flag_reciprocal_math
11363 && TREE_CODE (arg1) == MULT_EXPR
11364 && TREE_CODE (arg0) == REAL_CST
11365 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11367 tree tem = const_binop (RDIV_EXPR, arg0,
11368 TREE_OPERAND (arg1, 1));
11370 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11371 TREE_OPERAND (arg1, 0));
11374 if (flag_unsafe_math_optimizations)
11376 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11377 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11379 /* Optimize sin(x)/cos(x) as tan(x). */
11380 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11381 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11382 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11383 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11384 CALL_EXPR_ARG (arg1, 0), 0))
11386 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11388 if (tanfn != NULL_TREE)
11389 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11392 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11393 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11394 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11395 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11396 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11397 CALL_EXPR_ARG (arg1, 0), 0))
11399 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11401 if (tanfn != NULL_TREE)
11403 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11404 CALL_EXPR_ARG (arg0, 0));
11405 return fold_build2_loc (loc, RDIV_EXPR, type,
11406 build_real (type, dconst1), tmp);
11410 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11411 NaNs or Infinities. */
11412 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11413 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11414 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11416 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11417 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11419 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11420 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11421 && operand_equal_p (arg00, arg01, 0))
11423 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11425 if (cosfn != NULL_TREE)
11426 return build_call_expr_loc (loc, cosfn, 1, arg00);
11430 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11431 NaNs or Infinities. */
11432 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11433 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11434 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11436 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11437 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11439 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11440 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11441 && operand_equal_p (arg00, arg01, 0))
11443 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11445 if (cosfn != NULL_TREE)
11447 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11448 return fold_build2_loc (loc, RDIV_EXPR, type,
11449 build_real (type, dconst1),
11455 /* Optimize pow(x,c)/x as pow(x,c-1). */
11456 if (fcode0 == BUILT_IN_POW
11457 || fcode0 == BUILT_IN_POWF
11458 || fcode0 == BUILT_IN_POWL)
11460 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11461 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11462 if (TREE_CODE (arg01) == REAL_CST
11463 && !TREE_OVERFLOW (arg01)
11464 && operand_equal_p (arg1, arg00, 0))
11466 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11470 c = TREE_REAL_CST (arg01);
11471 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11472 arg = build_real (type, c);
11473 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11477 /* Optimize a/root(b/c) into a*root(c/b). */
11478 if (BUILTIN_ROOT_P (fcode1))
11480 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11482 if (TREE_CODE (rootarg) == RDIV_EXPR)
11484 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11485 tree b = TREE_OPERAND (rootarg, 0);
11486 tree c = TREE_OPERAND (rootarg, 1);
11488 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11490 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11491 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11495 /* Optimize x/expN(y) into x*expN(-y). */
11496 if (BUILTIN_EXPONENT_P (fcode1))
11498 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11499 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11500 arg1 = build_call_expr_loc (loc,
11502 fold_convert_loc (loc, type, arg));
11503 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11506 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11507 if (fcode1 == BUILT_IN_POW
11508 || fcode1 == BUILT_IN_POWF
11509 || fcode1 == BUILT_IN_POWL)
11511 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11512 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11513 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11514 tree neg11 = fold_convert_loc (loc, type,
11515 negate_expr (arg11));
11516 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11517 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11522 case TRUNC_DIV_EXPR:
11523 /* Optimize (X & (-A)) / A where A is a power of 2,
11525 if (TREE_CODE (arg0) == BIT_AND_EXPR
11526 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11527 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11529 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11530 arg1, TREE_OPERAND (arg0, 1));
11531 if (sum && integer_zerop (sum)) {
11532 unsigned long pow2;
11534 if (TREE_INT_CST_LOW (arg1))
11535 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11537 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11538 + HOST_BITS_PER_WIDE_INT;
11540 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11541 TREE_OPERAND (arg0, 0),
11542 build_int_cst (NULL_TREE, pow2));
11548 case FLOOR_DIV_EXPR:
11549 /* Simplify A / (B << N) where A and B are positive and B is
11550 a power of 2, to A >> (N + log2(B)). */
11551 strict_overflow_p = false;
11552 if (TREE_CODE (arg1) == LSHIFT_EXPR
11553 && (TYPE_UNSIGNED (type)
11554 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11556 tree sval = TREE_OPERAND (arg1, 0);
11557 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11559 tree sh_cnt = TREE_OPERAND (arg1, 1);
11560 unsigned long pow2;
11562 if (TREE_INT_CST_LOW (sval))
11563 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11565 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11566 + HOST_BITS_PER_WIDE_INT;
11568 if (strict_overflow_p)
11569 fold_overflow_warning (("assuming signed overflow does not "
11570 "occur when simplifying A / (B << N)"),
11571 WARN_STRICT_OVERFLOW_MISC);
11573 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11574 sh_cnt, build_int_cst (NULL_TREE, pow2));
11575 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11576 fold_convert_loc (loc, type, arg0), sh_cnt);
11580 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11581 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11582 if (INTEGRAL_TYPE_P (type)
11583 && TYPE_UNSIGNED (type)
11584 && code == FLOOR_DIV_EXPR)
11585 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11589 case ROUND_DIV_EXPR:
11590 case CEIL_DIV_EXPR:
11591 case EXACT_DIV_EXPR:
11592 if (integer_onep (arg1))
11593 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11594 if (integer_zerop (arg1))
11596 /* X / -1 is -X. */
11597 if (!TYPE_UNSIGNED (type)
11598 && TREE_CODE (arg1) == INTEGER_CST
11599 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11600 && TREE_INT_CST_HIGH (arg1) == -1)
11601 return fold_convert_loc (loc, type, negate_expr (arg0));
11603 /* Convert -A / -B to A / B when the type is signed and overflow is
11605 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11606 && TREE_CODE (arg0) == NEGATE_EXPR
11607 && negate_expr_p (arg1))
11609 if (INTEGRAL_TYPE_P (type))
11610 fold_overflow_warning (("assuming signed overflow does not occur "
11611 "when distributing negation across "
11613 WARN_STRICT_OVERFLOW_MISC);
11614 return fold_build2_loc (loc, code, type,
11615 fold_convert_loc (loc, type,
11616 TREE_OPERAND (arg0, 0)),
11617 fold_convert_loc (loc, type,
11618 negate_expr (arg1)));
11620 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11621 && TREE_CODE (arg1) == NEGATE_EXPR
11622 && negate_expr_p (arg0))
11624 if (INTEGRAL_TYPE_P (type))
11625 fold_overflow_warning (("assuming signed overflow does not occur "
11626 "when distributing negation across "
11628 WARN_STRICT_OVERFLOW_MISC);
11629 return fold_build2_loc (loc, code, type,
11630 fold_convert_loc (loc, type,
11631 negate_expr (arg0)),
11632 fold_convert_loc (loc, type,
11633 TREE_OPERAND (arg1, 0)));
11636 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11637 operation, EXACT_DIV_EXPR.
11639 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11640 At one time others generated faster code, it's not clear if they do
11641 after the last round to changes to the DIV code in expmed.c. */
11642 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11643 && multiple_of_p (type, arg0, arg1))
11644 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11646 strict_overflow_p = false;
11647 if (TREE_CODE (arg1) == INTEGER_CST
11648 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11649 &strict_overflow_p)))
11651 if (strict_overflow_p)
11652 fold_overflow_warning (("assuming signed overflow does not occur "
11653 "when simplifying division"),
11654 WARN_STRICT_OVERFLOW_MISC);
11655 return fold_convert_loc (loc, type, tem);
11660 case CEIL_MOD_EXPR:
11661 case FLOOR_MOD_EXPR:
11662 case ROUND_MOD_EXPR:
11663 case TRUNC_MOD_EXPR:
11664 /* X % 1 is always zero, but be sure to preserve any side
11666 if (integer_onep (arg1))
11667 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11669 /* X % 0, return X % 0 unchanged so that we can get the
11670 proper warnings and errors. */
11671 if (integer_zerop (arg1))
11674 /* 0 % X is always zero, but be sure to preserve any side
11675 effects in X. Place this after checking for X == 0. */
11676 if (integer_zerop (arg0))
11677 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11679 /* X % -1 is zero. */
11680 if (!TYPE_UNSIGNED (type)
11681 && TREE_CODE (arg1) == INTEGER_CST
11682 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11683 && TREE_INT_CST_HIGH (arg1) == -1)
11684 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11686 /* X % -C is the same as X % C. */
11687 if (code == TRUNC_MOD_EXPR
11688 && !TYPE_UNSIGNED (type)
11689 && TREE_CODE (arg1) == INTEGER_CST
11690 && !TREE_OVERFLOW (arg1)
11691 && TREE_INT_CST_HIGH (arg1) < 0
11692 && !TYPE_OVERFLOW_TRAPS (type)
11693 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11694 && !sign_bit_p (arg1, arg1))
11695 return fold_build2_loc (loc, code, type,
11696 fold_convert_loc (loc, type, arg0),
11697 fold_convert_loc (loc, type,
11698 negate_expr (arg1)));
11700 /* X % -Y is the same as X % Y. */
11701 if (code == TRUNC_MOD_EXPR
11702 && !TYPE_UNSIGNED (type)
11703 && TREE_CODE (arg1) == NEGATE_EXPR
11704 && !TYPE_OVERFLOW_TRAPS (type))
11705 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11706 fold_convert_loc (loc, type,
11707 TREE_OPERAND (arg1, 0)));
11709 strict_overflow_p = false;
11710 if (TREE_CODE (arg1) == INTEGER_CST
11711 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11712 &strict_overflow_p)))
11714 if (strict_overflow_p)
11715 fold_overflow_warning (("assuming signed overflow does not occur "
11716 "when simplifying modulus"),
11717 WARN_STRICT_OVERFLOW_MISC);
11718 return fold_convert_loc (loc, type, tem);
11721 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11722 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11723 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11724 && (TYPE_UNSIGNED (type)
11725 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11728 /* Also optimize A % (C << N) where C is a power of 2,
11729 to A & ((C << N) - 1). */
11730 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11731 c = TREE_OPERAND (arg1, 0);
11733 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11736 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11737 build_int_cst (TREE_TYPE (arg1), 1));
11738 if (strict_overflow_p)
11739 fold_overflow_warning (("assuming signed overflow does not "
11740 "occur when simplifying "
11741 "X % (power of two)"),
11742 WARN_STRICT_OVERFLOW_MISC);
11743 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11744 fold_convert_loc (loc, type, arg0),
11745 fold_convert_loc (loc, type, mask));
11753 if (integer_all_onesp (arg0))
11754 return omit_one_operand_loc (loc, type, arg0, arg1);
11758 /* Optimize -1 >> x for arithmetic right shifts. */
11759 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11760 && tree_expr_nonnegative_p (arg1))
11761 return omit_one_operand_loc (loc, type, arg0, arg1);
11762 /* ... fall through ... */
11766 if (integer_zerop (arg1))
11767 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11768 if (integer_zerop (arg0))
11769 return omit_one_operand_loc (loc, type, arg0, arg1);
11771 /* Since negative shift count is not well-defined,
11772 don't try to compute it in the compiler. */
11773 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11776 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11777 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11778 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11779 && host_integerp (TREE_OPERAND (arg0, 1), false)
11780 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11782 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11783 + TREE_INT_CST_LOW (arg1));
11785 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11786 being well defined. */
11787 if (low >= TYPE_PRECISION (type))
11789 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11790 low = low % TYPE_PRECISION (type);
11791 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11792 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11793 TREE_OPERAND (arg0, 0));
11795 low = TYPE_PRECISION (type) - 1;
11798 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11799 build_int_cst (type, low));
11802 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11803 into x & ((unsigned)-1 >> c) for unsigned types. */
11804 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11805 || (TYPE_UNSIGNED (type)
11806 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11807 && host_integerp (arg1, false)
11808 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11809 && host_integerp (TREE_OPERAND (arg0, 1), false)
11810 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11812 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11813 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11819 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11821 lshift = build_int_cst (type, -1);
11822 lshift = int_const_binop (code, lshift, arg1, 0);
11824 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11828 /* Rewrite an LROTATE_EXPR by a constant into an
11829 RROTATE_EXPR by a new constant. */
11830 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11832 tree tem = build_int_cst (TREE_TYPE (arg1),
11833 TYPE_PRECISION (type));
11834 tem = const_binop (MINUS_EXPR, tem, arg1);
11835 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
11838 /* If we have a rotate of a bit operation with the rotate count and
11839 the second operand of the bit operation both constant,
11840 permute the two operations. */
11841 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11842 && (TREE_CODE (arg0) == BIT_AND_EXPR
11843 || TREE_CODE (arg0) == BIT_IOR_EXPR
11844 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11845 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11846 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11847 fold_build2_loc (loc, code, type,
11848 TREE_OPERAND (arg0, 0), arg1),
11849 fold_build2_loc (loc, code, type,
11850 TREE_OPERAND (arg0, 1), arg1));
11852 /* Two consecutive rotates adding up to the precision of the
11853 type can be ignored. */
11854 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11855 && TREE_CODE (arg0) == RROTATE_EXPR
11856 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11857 && TREE_INT_CST_HIGH (arg1) == 0
11858 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11859 && ((TREE_INT_CST_LOW (arg1)
11860 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11861 == (unsigned int) TYPE_PRECISION (type)))
11862 return TREE_OPERAND (arg0, 0);
11864 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11865 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11866 if the latter can be further optimized. */
11867 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11868 && TREE_CODE (arg0) == BIT_AND_EXPR
11869 && TREE_CODE (arg1) == INTEGER_CST
11870 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11872 tree mask = fold_build2_loc (loc, code, type,
11873 fold_convert_loc (loc, type,
11874 TREE_OPERAND (arg0, 1)),
11876 tree shift = fold_build2_loc (loc, code, type,
11877 fold_convert_loc (loc, type,
11878 TREE_OPERAND (arg0, 0)),
11880 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11888 if (operand_equal_p (arg0, arg1, 0))
11889 return omit_one_operand_loc (loc, type, arg0, arg1);
11890 if (INTEGRAL_TYPE_P (type)
11891 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11892 return omit_one_operand_loc (loc, type, arg1, arg0);
11893 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11899 if (operand_equal_p (arg0, arg1, 0))
11900 return omit_one_operand_loc (loc, type, arg0, arg1);
11901 if (INTEGRAL_TYPE_P (type)
11902 && TYPE_MAX_VALUE (type)
11903 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11904 return omit_one_operand_loc (loc, type, arg1, arg0);
11905 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11910 case TRUTH_ANDIF_EXPR:
11911 /* Note that the operands of this must be ints
11912 and their values must be 0 or 1.
11913 ("true" is a fixed value perhaps depending on the language.) */
11914 /* If first arg is constant zero, return it. */
11915 if (integer_zerop (arg0))
11916 return fold_convert_loc (loc, type, arg0);
11917 case TRUTH_AND_EXPR:
11918 /* If either arg is constant true, drop it. */
11919 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11920 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11921 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11922 /* Preserve sequence points. */
11923 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11924 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11925 /* If second arg is constant zero, result is zero, but first arg
11926 must be evaluated. */
11927 if (integer_zerop (arg1))
11928 return omit_one_operand_loc (loc, type, arg1, arg0);
11929 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11930 case will be handled here. */
11931 if (integer_zerop (arg0))
11932 return omit_one_operand_loc (loc, type, arg0, arg1);
11934 /* !X && X is always false. */
11935 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11936 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11937 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11938 /* X && !X is always false. */
11939 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11940 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11941 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11943 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11944 means A >= Y && A != MAX, but in this case we know that
11947 if (!TREE_SIDE_EFFECTS (arg0)
11948 && !TREE_SIDE_EFFECTS (arg1))
11950 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11951 if (tem && !operand_equal_p (tem, arg0, 0))
11952 return fold_build2_loc (loc, code, type, tem, arg1);
11954 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11955 if (tem && !operand_equal_p (tem, arg1, 0))
11956 return fold_build2_loc (loc, code, type, arg0, tem);
11960 /* We only do these simplifications if we are optimizing. */
11964 /* Check for things like (A || B) && (A || C). We can convert this
11965 to A || (B && C). Note that either operator can be any of the four
11966 truth and/or operations and the transformation will still be
11967 valid. Also note that we only care about order for the
11968 ANDIF and ORIF operators. If B contains side effects, this
11969 might change the truth-value of A. */
11970 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11971 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11972 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11973 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11974 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11975 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11977 tree a00 = TREE_OPERAND (arg0, 0);
11978 tree a01 = TREE_OPERAND (arg0, 1);
11979 tree a10 = TREE_OPERAND (arg1, 0);
11980 tree a11 = TREE_OPERAND (arg1, 1);
11981 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11982 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11983 && (code == TRUTH_AND_EXPR
11984 || code == TRUTH_OR_EXPR));
11986 if (operand_equal_p (a00, a10, 0))
11987 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11988 fold_build2_loc (loc, code, type, a01, a11));
11989 else if (commutative && operand_equal_p (a00, a11, 0))
11990 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11991 fold_build2_loc (loc, code, type, a01, a10));
11992 else if (commutative && operand_equal_p (a01, a10, 0))
11993 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
11994 fold_build2_loc (loc, code, type, a00, a11));
11996 /* This case if tricky because we must either have commutative
11997 operators or else A10 must not have side-effects. */
11999 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12000 && operand_equal_p (a01, a11, 0))
12001 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12002 fold_build2_loc (loc, code, type, a00, a10),
12006 /* See if we can build a range comparison. */
12007 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
12010 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
12011 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
12013 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
12015 return fold_build2_loc (loc, code, type, tem, arg1);
12018 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
12019 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
12021 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
12023 return fold_build2_loc (loc, code, type, arg0, tem);
12026 /* Check for the possibility of merging component references. If our
12027 lhs is another similar operation, try to merge its rhs with our
12028 rhs. Then try to merge our lhs and rhs. */
12029 if (TREE_CODE (arg0) == code
12030 && 0 != (tem = fold_truthop (loc, code, type,
12031 TREE_OPERAND (arg0, 1), arg1)))
12032 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12034 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12039 case TRUTH_ORIF_EXPR:
12040 /* Note that the operands of this must be ints
12041 and their values must be 0 or true.
12042 ("true" is a fixed value perhaps depending on the language.) */
12043 /* If first arg is constant true, return it. */
12044 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12045 return fold_convert_loc (loc, type, arg0);
12046 case TRUTH_OR_EXPR:
12047 /* If either arg is constant zero, drop it. */
12048 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12049 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12050 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12051 /* Preserve sequence points. */
12052 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12053 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12054 /* If second arg is constant true, result is true, but we must
12055 evaluate first arg. */
12056 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12057 return omit_one_operand_loc (loc, type, arg1, arg0);
12058 /* Likewise for first arg, but note this only occurs here for
12060 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12061 return omit_one_operand_loc (loc, type, arg0, arg1);
12063 /* !X || X is always true. */
12064 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12065 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12066 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12067 /* X || !X is always true. */
12068 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12069 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12070 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12074 case TRUTH_XOR_EXPR:
12075 /* If the second arg is constant zero, drop it. */
12076 if (integer_zerop (arg1))
12077 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12078 /* If the second arg is constant true, this is a logical inversion. */
12079 if (integer_onep (arg1))
12081 /* Only call invert_truthvalue if operand is a truth value. */
12082 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12083 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12085 tem = invert_truthvalue_loc (loc, arg0);
12086 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12088 /* Identical arguments cancel to zero. */
12089 if (operand_equal_p (arg0, arg1, 0))
12090 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12092 /* !X ^ X is always true. */
12093 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12094 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12095 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12097 /* X ^ !X is always true. */
12098 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12099 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12100 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12106 tem = fold_comparison (loc, code, type, op0, op1);
12107 if (tem != NULL_TREE)
12110 /* bool_var != 0 becomes bool_var. */
12111 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12112 && code == NE_EXPR)
12113 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12115 /* bool_var == 1 becomes bool_var. */
12116 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12117 && code == EQ_EXPR)
12118 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12120 /* bool_var != 1 becomes !bool_var. */
12121 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12122 && code == NE_EXPR)
12123 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12124 fold_convert_loc (loc, type, arg0));
12126 /* bool_var == 0 becomes !bool_var. */
12127 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12128 && code == EQ_EXPR)
12129 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12130 fold_convert_loc (loc, type, arg0));
12132 /* !exp != 0 becomes !exp */
12133 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12134 && code == NE_EXPR)
12135 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12137 /* If this is an equality comparison of the address of two non-weak,
12138 unaliased symbols neither of which are extern (since we do not
12139 have access to attributes for externs), then we know the result. */
12140 if (TREE_CODE (arg0) == ADDR_EXPR
12141 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12142 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12143 && ! lookup_attribute ("alias",
12144 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12145 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12146 && TREE_CODE (arg1) == ADDR_EXPR
12147 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12148 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12149 && ! lookup_attribute ("alias",
12150 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12151 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12153 /* We know that we're looking at the address of two
12154 non-weak, unaliased, static _DECL nodes.
12156 It is both wasteful and incorrect to call operand_equal_p
12157 to compare the two ADDR_EXPR nodes. It is wasteful in that
12158 all we need to do is test pointer equality for the arguments
12159 to the two ADDR_EXPR nodes. It is incorrect to use
12160 operand_equal_p as that function is NOT equivalent to a
12161 C equality test. It can in fact return false for two
12162 objects which would test as equal using the C equality
12164 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12165 return constant_boolean_node (equal
12166 ? code == EQ_EXPR : code != EQ_EXPR,
12170 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12171 a MINUS_EXPR of a constant, we can convert it into a comparison with
12172 a revised constant as long as no overflow occurs. */
12173 if (TREE_CODE (arg1) == INTEGER_CST
12174 && (TREE_CODE (arg0) == PLUS_EXPR
12175 || TREE_CODE (arg0) == MINUS_EXPR)
12176 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12177 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12178 ? MINUS_EXPR : PLUS_EXPR,
12179 fold_convert_loc (loc, TREE_TYPE (arg0),
12181 TREE_OPERAND (arg0, 1)))
12182 && !TREE_OVERFLOW (tem))
12183 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12185 /* Similarly for a NEGATE_EXPR. */
12186 if (TREE_CODE (arg0) == NEGATE_EXPR
12187 && TREE_CODE (arg1) == INTEGER_CST
12188 && 0 != (tem = negate_expr (arg1))
12189 && TREE_CODE (tem) == INTEGER_CST
12190 && !TREE_OVERFLOW (tem))
12191 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12193 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12194 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12195 && TREE_CODE (arg1) == INTEGER_CST
12196 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12197 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12198 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12199 fold_convert_loc (loc,
12202 TREE_OPERAND (arg0, 1)));
12204 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12205 if ((TREE_CODE (arg0) == PLUS_EXPR
12206 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12207 || TREE_CODE (arg0) == MINUS_EXPR)
12208 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12209 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12210 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12212 tree val = TREE_OPERAND (arg0, 1);
12213 return omit_two_operands_loc (loc, type,
12214 fold_build2_loc (loc, code, type,
12216 build_int_cst (TREE_TYPE (val),
12218 TREE_OPERAND (arg0, 0), arg1);
12221 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12222 if (TREE_CODE (arg0) == MINUS_EXPR
12223 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12224 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12225 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12227 return omit_two_operands_loc (loc, type,
12229 ? boolean_true_node : boolean_false_node,
12230 TREE_OPERAND (arg0, 1), arg1);
12233 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12234 for !=. Don't do this for ordered comparisons due to overflow. */
12235 if (TREE_CODE (arg0) == MINUS_EXPR
12236 && integer_zerop (arg1))
12237 return fold_build2_loc (loc, code, type,
12238 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12240 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12241 if (TREE_CODE (arg0) == ABS_EXPR
12242 && (integer_zerop (arg1) || real_zerop (arg1)))
12243 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12245 /* If this is an EQ or NE comparison with zero and ARG0 is
12246 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12247 two operations, but the latter can be done in one less insn
12248 on machines that have only two-operand insns or on which a
12249 constant cannot be the first operand. */
12250 if (TREE_CODE (arg0) == BIT_AND_EXPR
12251 && integer_zerop (arg1))
12253 tree arg00 = TREE_OPERAND (arg0, 0);
12254 tree arg01 = TREE_OPERAND (arg0, 1);
12255 if (TREE_CODE (arg00) == LSHIFT_EXPR
12256 && integer_onep (TREE_OPERAND (arg00, 0)))
12258 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12259 arg01, TREE_OPERAND (arg00, 1));
12260 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12261 build_int_cst (TREE_TYPE (arg0), 1));
12262 return fold_build2_loc (loc, code, type,
12263 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12266 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12267 && integer_onep (TREE_OPERAND (arg01, 0)))
12269 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12270 arg00, TREE_OPERAND (arg01, 1));
12271 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12272 build_int_cst (TREE_TYPE (arg0), 1));
12273 return fold_build2_loc (loc, code, type,
12274 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12279 /* If this is an NE or EQ comparison of zero against the result of a
12280 signed MOD operation whose second operand is a power of 2, make
12281 the MOD operation unsigned since it is simpler and equivalent. */
12282 if (integer_zerop (arg1)
12283 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12284 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12285 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12286 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12287 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12288 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12290 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12291 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12292 fold_convert_loc (loc, newtype,
12293 TREE_OPERAND (arg0, 0)),
12294 fold_convert_loc (loc, newtype,
12295 TREE_OPERAND (arg0, 1)));
12297 return fold_build2_loc (loc, code, type, newmod,
12298 fold_convert_loc (loc, newtype, arg1));
12301 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12302 C1 is a valid shift constant, and C2 is a power of two, i.e.
12304 if (TREE_CODE (arg0) == BIT_AND_EXPR
12305 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12306 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12308 && integer_pow2p (TREE_OPERAND (arg0, 1))
12309 && integer_zerop (arg1))
12311 tree itype = TREE_TYPE (arg0);
12312 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12313 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12315 /* Check for a valid shift count. */
12316 if (TREE_INT_CST_HIGH (arg001) == 0
12317 && TREE_INT_CST_LOW (arg001) < prec)
12319 tree arg01 = TREE_OPERAND (arg0, 1);
12320 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12321 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12322 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12323 can be rewritten as (X & (C2 << C1)) != 0. */
12324 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12326 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12327 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12328 return fold_build2_loc (loc, code, type, tem, arg1);
12330 /* Otherwise, for signed (arithmetic) shifts,
12331 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12332 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12333 else if (!TYPE_UNSIGNED (itype))
12334 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12335 arg000, build_int_cst (itype, 0));
12336 /* Otherwise, of unsigned (logical) shifts,
12337 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12338 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12340 return omit_one_operand_loc (loc, type,
12341 code == EQ_EXPR ? integer_one_node
12342 : integer_zero_node,
12347 /* If this is an NE comparison of zero with an AND of one, remove the
12348 comparison since the AND will give the correct value. */
12349 if (code == NE_EXPR
12350 && integer_zerop (arg1)
12351 && TREE_CODE (arg0) == BIT_AND_EXPR
12352 && integer_onep (TREE_OPERAND (arg0, 1)))
12353 return fold_convert_loc (loc, type, arg0);
12355 /* If we have (A & C) == C where C is a power of 2, convert this into
12356 (A & C) != 0. Similarly for NE_EXPR. */
12357 if (TREE_CODE (arg0) == BIT_AND_EXPR
12358 && integer_pow2p (TREE_OPERAND (arg0, 1))
12359 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12360 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12361 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12362 integer_zero_node));
12364 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12365 bit, then fold the expression into A < 0 or A >= 0. */
12366 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12370 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12371 Similarly for NE_EXPR. */
12372 if (TREE_CODE (arg0) == BIT_AND_EXPR
12373 && TREE_CODE (arg1) == INTEGER_CST
12374 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12376 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12377 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12378 TREE_OPERAND (arg0, 1));
12379 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12381 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12382 if (integer_nonzerop (dandnotc))
12383 return omit_one_operand_loc (loc, type, rslt, arg0);
12386 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12387 Similarly for NE_EXPR. */
12388 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12389 && TREE_CODE (arg1) == INTEGER_CST
12390 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12392 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12393 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12394 TREE_OPERAND (arg0, 1), notd);
12395 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12396 if (integer_nonzerop (candnotd))
12397 return omit_one_operand_loc (loc, type, rslt, arg0);
12400 /* If this is a comparison of a field, we may be able to simplify it. */
12401 if ((TREE_CODE (arg0) == COMPONENT_REF
12402 || TREE_CODE (arg0) == BIT_FIELD_REF)
12403 /* Handle the constant case even without -O
12404 to make sure the warnings are given. */
12405 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12407 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12412 /* Optimize comparisons of strlen vs zero to a compare of the
12413 first character of the string vs zero. To wit,
12414 strlen(ptr) == 0 => *ptr == 0
12415 strlen(ptr) != 0 => *ptr != 0
12416 Other cases should reduce to one of these two (or a constant)
12417 due to the return value of strlen being unsigned. */
12418 if (TREE_CODE (arg0) == CALL_EXPR
12419 && integer_zerop (arg1))
12421 tree fndecl = get_callee_fndecl (arg0);
12424 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12425 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12426 && call_expr_nargs (arg0) == 1
12427 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12429 tree iref = build_fold_indirect_ref_loc (loc,
12430 CALL_EXPR_ARG (arg0, 0));
12431 return fold_build2_loc (loc, code, type, iref,
12432 build_int_cst (TREE_TYPE (iref), 0));
12436 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12437 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12438 if (TREE_CODE (arg0) == RSHIFT_EXPR
12439 && integer_zerop (arg1)
12440 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12442 tree arg00 = TREE_OPERAND (arg0, 0);
12443 tree arg01 = TREE_OPERAND (arg0, 1);
12444 tree itype = TREE_TYPE (arg00);
12445 if (TREE_INT_CST_HIGH (arg01) == 0
12446 && TREE_INT_CST_LOW (arg01)
12447 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12449 if (TYPE_UNSIGNED (itype))
12451 itype = signed_type_for (itype);
12452 arg00 = fold_convert_loc (loc, itype, arg00);
12454 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12455 type, arg00, build_int_cst (itype, 0));
12459 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12460 if (integer_zerop (arg1)
12461 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12462 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12463 TREE_OPERAND (arg0, 1));
12465 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12466 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12467 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12468 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12469 build_int_cst (TREE_TYPE (arg1), 0));
12470 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12471 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12472 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12473 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12474 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12475 build_int_cst (TREE_TYPE (arg1), 0));
12477 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12478 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12479 && TREE_CODE (arg1) == INTEGER_CST
12480 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12481 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12482 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12483 TREE_OPERAND (arg0, 1), arg1));
12485 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12486 (X & C) == 0 when C is a single bit. */
12487 if (TREE_CODE (arg0) == BIT_AND_EXPR
12488 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12489 && integer_zerop (arg1)
12490 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12492 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12493 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12494 TREE_OPERAND (arg0, 1));
12495 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12499 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12500 constant C is a power of two, i.e. a single bit. */
12501 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12502 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12503 && integer_zerop (arg1)
12504 && integer_pow2p (TREE_OPERAND (arg0, 1))
12505 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12506 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12508 tree arg00 = TREE_OPERAND (arg0, 0);
12509 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12510 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12513 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12514 when is C is a power of two, i.e. a single bit. */
12515 if (TREE_CODE (arg0) == BIT_AND_EXPR
12516 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12517 && integer_zerop (arg1)
12518 && integer_pow2p (TREE_OPERAND (arg0, 1))
12519 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12520 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12522 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12523 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12524 arg000, TREE_OPERAND (arg0, 1));
12525 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12526 tem, build_int_cst (TREE_TYPE (tem), 0));
12529 if (integer_zerop (arg1)
12530 && tree_expr_nonzero_p (arg0))
12532 tree res = constant_boolean_node (code==NE_EXPR, type);
12533 return omit_one_operand_loc (loc, type, res, arg0);
12536 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12537 if (TREE_CODE (arg0) == NEGATE_EXPR
12538 && TREE_CODE (arg1) == NEGATE_EXPR)
12539 return fold_build2_loc (loc, code, type,
12540 TREE_OPERAND (arg0, 0),
12541 TREE_OPERAND (arg1, 0));
12543 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12544 if (TREE_CODE (arg0) == BIT_AND_EXPR
12545 && TREE_CODE (arg1) == BIT_AND_EXPR)
12547 tree arg00 = TREE_OPERAND (arg0, 0);
12548 tree arg01 = TREE_OPERAND (arg0, 1);
12549 tree arg10 = TREE_OPERAND (arg1, 0);
12550 tree arg11 = TREE_OPERAND (arg1, 1);
12551 tree itype = TREE_TYPE (arg0);
12553 if (operand_equal_p (arg01, arg11, 0))
12554 return fold_build2_loc (loc, code, type,
12555 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12556 fold_build2_loc (loc,
12557 BIT_XOR_EXPR, itype,
12560 build_int_cst (itype, 0));
12562 if (operand_equal_p (arg01, arg10, 0))
12563 return fold_build2_loc (loc, code, type,
12564 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12565 fold_build2_loc (loc,
12566 BIT_XOR_EXPR, itype,
12569 build_int_cst (itype, 0));
12571 if (operand_equal_p (arg00, arg11, 0))
12572 return fold_build2_loc (loc, code, type,
12573 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12574 fold_build2_loc (loc,
12575 BIT_XOR_EXPR, itype,
12578 build_int_cst (itype, 0));
12580 if (operand_equal_p (arg00, arg10, 0))
12581 return fold_build2_loc (loc, code, type,
12582 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12583 fold_build2_loc (loc,
12584 BIT_XOR_EXPR, itype,
12587 build_int_cst (itype, 0));
12590 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12591 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12593 tree arg00 = TREE_OPERAND (arg0, 0);
12594 tree arg01 = TREE_OPERAND (arg0, 1);
12595 tree arg10 = TREE_OPERAND (arg1, 0);
12596 tree arg11 = TREE_OPERAND (arg1, 1);
12597 tree itype = TREE_TYPE (arg0);
12599 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12600 operand_equal_p guarantees no side-effects so we don't need
12601 to use omit_one_operand on Z. */
12602 if (operand_equal_p (arg01, arg11, 0))
12603 return fold_build2_loc (loc, code, type, arg00, arg10);
12604 if (operand_equal_p (arg01, arg10, 0))
12605 return fold_build2_loc (loc, code, type, arg00, arg11);
12606 if (operand_equal_p (arg00, arg11, 0))
12607 return fold_build2_loc (loc, code, type, arg01, arg10);
12608 if (operand_equal_p (arg00, arg10, 0))
12609 return fold_build2_loc (loc, code, type, arg01, arg11);
12611 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12612 if (TREE_CODE (arg01) == INTEGER_CST
12613 && TREE_CODE (arg11) == INTEGER_CST)
12614 return fold_build2_loc (loc, code, type,
12615 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
12616 fold_build2_loc (loc,
12617 BIT_XOR_EXPR, itype,
12622 /* Attempt to simplify equality/inequality comparisons of complex
12623 values. Only lower the comparison if the result is known or
12624 can be simplified to a single scalar comparison. */
12625 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12626 || TREE_CODE (arg0) == COMPLEX_CST)
12627 && (TREE_CODE (arg1) == COMPLEX_EXPR
12628 || TREE_CODE (arg1) == COMPLEX_CST))
12630 tree real0, imag0, real1, imag1;
12633 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12635 real0 = TREE_OPERAND (arg0, 0);
12636 imag0 = TREE_OPERAND (arg0, 1);
12640 real0 = TREE_REALPART (arg0);
12641 imag0 = TREE_IMAGPART (arg0);
12644 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12646 real1 = TREE_OPERAND (arg1, 0);
12647 imag1 = TREE_OPERAND (arg1, 1);
12651 real1 = TREE_REALPART (arg1);
12652 imag1 = TREE_IMAGPART (arg1);
12655 rcond = fold_binary_loc (loc, code, type, real0, real1);
12656 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12658 if (integer_zerop (rcond))
12660 if (code == EQ_EXPR)
12661 return omit_two_operands_loc (loc, type, boolean_false_node,
12663 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12667 if (code == NE_EXPR)
12668 return omit_two_operands_loc (loc, type, boolean_true_node,
12670 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12674 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12675 if (icond && TREE_CODE (icond) == INTEGER_CST)
12677 if (integer_zerop (icond))
12679 if (code == EQ_EXPR)
12680 return omit_two_operands_loc (loc, type, boolean_false_node,
12682 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12686 if (code == NE_EXPR)
12687 return omit_two_operands_loc (loc, type, boolean_true_node,
12689 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12700 tem = fold_comparison (loc, code, type, op0, op1);
12701 if (tem != NULL_TREE)
12704 /* Transform comparisons of the form X +- C CMP X. */
12705 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12706 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12707 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12708 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12709 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12710 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12712 tree arg01 = TREE_OPERAND (arg0, 1);
12713 enum tree_code code0 = TREE_CODE (arg0);
12716 if (TREE_CODE (arg01) == REAL_CST)
12717 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12719 is_positive = tree_int_cst_sgn (arg01);
12721 /* (X - c) > X becomes false. */
12722 if (code == GT_EXPR
12723 && ((code0 == MINUS_EXPR && is_positive >= 0)
12724 || (code0 == PLUS_EXPR && is_positive <= 0)))
12726 if (TREE_CODE (arg01) == INTEGER_CST
12727 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12728 fold_overflow_warning (("assuming signed overflow does not "
12729 "occur when assuming that (X - c) > X "
12730 "is always false"),
12731 WARN_STRICT_OVERFLOW_ALL);
12732 return constant_boolean_node (0, type);
12735 /* Likewise (X + c) < X becomes false. */
12736 if (code == LT_EXPR
12737 && ((code0 == PLUS_EXPR && is_positive >= 0)
12738 || (code0 == MINUS_EXPR && is_positive <= 0)))
12740 if (TREE_CODE (arg01) == INTEGER_CST
12741 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12742 fold_overflow_warning (("assuming signed overflow does not "
12743 "occur when assuming that "
12744 "(X + c) < X is always false"),
12745 WARN_STRICT_OVERFLOW_ALL);
12746 return constant_boolean_node (0, type);
12749 /* Convert (X - c) <= X to true. */
12750 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12752 && ((code0 == MINUS_EXPR && is_positive >= 0)
12753 || (code0 == PLUS_EXPR && is_positive <= 0)))
12755 if (TREE_CODE (arg01) == INTEGER_CST
12756 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12757 fold_overflow_warning (("assuming signed overflow does not "
12758 "occur when assuming that "
12759 "(X - c) <= X is always true"),
12760 WARN_STRICT_OVERFLOW_ALL);
12761 return constant_boolean_node (1, type);
12764 /* Convert (X + c) >= X to true. */
12765 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12767 && ((code0 == PLUS_EXPR && is_positive >= 0)
12768 || (code0 == MINUS_EXPR && is_positive <= 0)))
12770 if (TREE_CODE (arg01) == INTEGER_CST
12771 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12772 fold_overflow_warning (("assuming signed overflow does not "
12773 "occur when assuming that "
12774 "(X + c) >= X is always true"),
12775 WARN_STRICT_OVERFLOW_ALL);
12776 return constant_boolean_node (1, type);
12779 if (TREE_CODE (arg01) == INTEGER_CST)
12781 /* Convert X + c > X and X - c < X to true for integers. */
12782 if (code == GT_EXPR
12783 && ((code0 == PLUS_EXPR && is_positive > 0)
12784 || (code0 == MINUS_EXPR && is_positive < 0)))
12786 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12787 fold_overflow_warning (("assuming signed overflow does "
12788 "not occur when assuming that "
12789 "(X + c) > X is always true"),
12790 WARN_STRICT_OVERFLOW_ALL);
12791 return constant_boolean_node (1, type);
12794 if (code == LT_EXPR
12795 && ((code0 == MINUS_EXPR && is_positive > 0)
12796 || (code0 == PLUS_EXPR && is_positive < 0)))
12798 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12799 fold_overflow_warning (("assuming signed overflow does "
12800 "not occur when assuming that "
12801 "(X - c) < X is always true"),
12802 WARN_STRICT_OVERFLOW_ALL);
12803 return constant_boolean_node (1, type);
12806 /* Convert X + c <= X and X - c >= X to false for integers. */
12807 if (code == LE_EXPR
12808 && ((code0 == PLUS_EXPR && is_positive > 0)
12809 || (code0 == MINUS_EXPR && is_positive < 0)))
12811 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12812 fold_overflow_warning (("assuming signed overflow does "
12813 "not occur when assuming that "
12814 "(X + c) <= X is always false"),
12815 WARN_STRICT_OVERFLOW_ALL);
12816 return constant_boolean_node (0, type);
12819 if (code == GE_EXPR
12820 && ((code0 == MINUS_EXPR && is_positive > 0)
12821 || (code0 == PLUS_EXPR && is_positive < 0)))
12823 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12824 fold_overflow_warning (("assuming signed overflow does "
12825 "not occur when assuming that "
12826 "(X - c) >= X is always false"),
12827 WARN_STRICT_OVERFLOW_ALL);
12828 return constant_boolean_node (0, type);
12833 /* Comparisons with the highest or lowest possible integer of
12834 the specified precision will have known values. */
12836 tree arg1_type = TREE_TYPE (arg1);
12837 unsigned int width = TYPE_PRECISION (arg1_type);
12839 if (TREE_CODE (arg1) == INTEGER_CST
12840 && width <= 2 * HOST_BITS_PER_WIDE_INT
12841 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12843 HOST_WIDE_INT signed_max_hi;
12844 unsigned HOST_WIDE_INT signed_max_lo;
12845 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12847 if (width <= HOST_BITS_PER_WIDE_INT)
12849 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12854 if (TYPE_UNSIGNED (arg1_type))
12856 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12862 max_lo = signed_max_lo;
12863 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12869 width -= HOST_BITS_PER_WIDE_INT;
12870 signed_max_lo = -1;
12871 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12876 if (TYPE_UNSIGNED (arg1_type))
12878 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12883 max_hi = signed_max_hi;
12884 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12888 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12889 && TREE_INT_CST_LOW (arg1) == max_lo)
12893 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12896 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12899 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12902 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12904 /* The GE_EXPR and LT_EXPR cases above are not normally
12905 reached because of previous transformations. */
12910 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12912 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12916 arg1 = const_binop (PLUS_EXPR, arg1,
12917 build_int_cst (TREE_TYPE (arg1), 1));
12918 return fold_build2_loc (loc, EQ_EXPR, type,
12919 fold_convert_loc (loc,
12920 TREE_TYPE (arg1), arg0),
12923 arg1 = const_binop (PLUS_EXPR, arg1,
12924 build_int_cst (TREE_TYPE (arg1), 1));
12925 return fold_build2_loc (loc, NE_EXPR, type,
12926 fold_convert_loc (loc, TREE_TYPE (arg1),
12932 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12934 && TREE_INT_CST_LOW (arg1) == min_lo)
12938 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12941 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12944 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12947 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12952 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12954 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12958 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12959 return fold_build2_loc (loc, NE_EXPR, type,
12960 fold_convert_loc (loc,
12961 TREE_TYPE (arg1), arg0),
12964 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12965 return fold_build2_loc (loc, EQ_EXPR, type,
12966 fold_convert_loc (loc, TREE_TYPE (arg1),
12973 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12974 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12975 && TYPE_UNSIGNED (arg1_type)
12976 /* We will flip the signedness of the comparison operator
12977 associated with the mode of arg1, so the sign bit is
12978 specified by this mode. Check that arg1 is the signed
12979 max associated with this sign bit. */
12980 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12981 /* signed_type does not work on pointer types. */
12982 && INTEGRAL_TYPE_P (arg1_type))
12984 /* The following case also applies to X < signed_max+1
12985 and X >= signed_max+1 because previous transformations. */
12986 if (code == LE_EXPR || code == GT_EXPR)
12989 st = signed_type_for (TREE_TYPE (arg1));
12990 return fold_build2_loc (loc,
12991 code == LE_EXPR ? GE_EXPR : LT_EXPR,
12992 type, fold_convert_loc (loc, st, arg0),
12993 build_int_cst (st, 0));
12999 /* If we are comparing an ABS_EXPR with a constant, we can
13000 convert all the cases into explicit comparisons, but they may
13001 well not be faster than doing the ABS and one comparison.
13002 But ABS (X) <= C is a range comparison, which becomes a subtraction
13003 and a comparison, and is probably faster. */
13004 if (code == LE_EXPR
13005 && TREE_CODE (arg1) == INTEGER_CST
13006 && TREE_CODE (arg0) == ABS_EXPR
13007 && ! TREE_SIDE_EFFECTS (arg0)
13008 && (0 != (tem = negate_expr (arg1)))
13009 && TREE_CODE (tem) == INTEGER_CST
13010 && !TREE_OVERFLOW (tem))
13011 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13012 build2 (GE_EXPR, type,
13013 TREE_OPERAND (arg0, 0), tem),
13014 build2 (LE_EXPR, type,
13015 TREE_OPERAND (arg0, 0), arg1));
13017 /* Convert ABS_EXPR<x> >= 0 to true. */
13018 strict_overflow_p = false;
13019 if (code == GE_EXPR
13020 && (integer_zerop (arg1)
13021 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13022 && real_zerop (arg1)))
13023 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13025 if (strict_overflow_p)
13026 fold_overflow_warning (("assuming signed overflow does not occur "
13027 "when simplifying comparison of "
13028 "absolute value and zero"),
13029 WARN_STRICT_OVERFLOW_CONDITIONAL);
13030 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13033 /* Convert ABS_EXPR<x> < 0 to false. */
13034 strict_overflow_p = false;
13035 if (code == LT_EXPR
13036 && (integer_zerop (arg1) || real_zerop (arg1))
13037 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13039 if (strict_overflow_p)
13040 fold_overflow_warning (("assuming signed overflow does not occur "
13041 "when simplifying comparison of "
13042 "absolute value and zero"),
13043 WARN_STRICT_OVERFLOW_CONDITIONAL);
13044 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13047 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13048 and similarly for >= into !=. */
13049 if ((code == LT_EXPR || code == GE_EXPR)
13050 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13051 && TREE_CODE (arg1) == LSHIFT_EXPR
13052 && integer_onep (TREE_OPERAND (arg1, 0)))
13053 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13054 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13055 TREE_OPERAND (arg1, 1)),
13056 build_int_cst (TREE_TYPE (arg0), 0));
13058 if ((code == LT_EXPR || code == GE_EXPR)
13059 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13060 && CONVERT_EXPR_P (arg1)
13061 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13062 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13064 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13065 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13066 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13067 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13068 build_int_cst (TREE_TYPE (arg0), 0));
13073 case UNORDERED_EXPR:
13081 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13083 t1 = fold_relational_const (code, type, arg0, arg1);
13084 if (t1 != NULL_TREE)
13088 /* If the first operand is NaN, the result is constant. */
13089 if (TREE_CODE (arg0) == REAL_CST
13090 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13091 && (code != LTGT_EXPR || ! flag_trapping_math))
13093 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13094 ? integer_zero_node
13095 : integer_one_node;
13096 return omit_one_operand_loc (loc, type, t1, arg1);
13099 /* If the second operand is NaN, the result is constant. */
13100 if (TREE_CODE (arg1) == REAL_CST
13101 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13102 && (code != LTGT_EXPR || ! flag_trapping_math))
13104 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13105 ? integer_zero_node
13106 : integer_one_node;
13107 return omit_one_operand_loc (loc, type, t1, arg0);
13110 /* Simplify unordered comparison of something with itself. */
13111 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13112 && operand_equal_p (arg0, arg1, 0))
13113 return constant_boolean_node (1, type);
13115 if (code == LTGT_EXPR
13116 && !flag_trapping_math
13117 && operand_equal_p (arg0, arg1, 0))
13118 return constant_boolean_node (0, type);
13120 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13122 tree targ0 = strip_float_extensions (arg0);
13123 tree targ1 = strip_float_extensions (arg1);
13124 tree newtype = TREE_TYPE (targ0);
13126 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13127 newtype = TREE_TYPE (targ1);
13129 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13130 return fold_build2_loc (loc, code, type,
13131 fold_convert_loc (loc, newtype, targ0),
13132 fold_convert_loc (loc, newtype, targ1));
13137 case COMPOUND_EXPR:
13138 /* When pedantic, a compound expression can be neither an lvalue
13139 nor an integer constant expression. */
13140 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13142 /* Don't let (0, 0) be null pointer constant. */
13143 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13144 : fold_convert_loc (loc, type, arg1);
13145 return pedantic_non_lvalue_loc (loc, tem);
13148 if ((TREE_CODE (arg0) == REAL_CST
13149 && TREE_CODE (arg1) == REAL_CST)
13150 || (TREE_CODE (arg0) == INTEGER_CST
13151 && TREE_CODE (arg1) == INTEGER_CST))
13152 return build_complex (type, arg0, arg1);
13156 /* An ASSERT_EXPR should never be passed to fold_binary. */
13157 gcc_unreachable ();
13161 } /* switch (code) */
13164 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13165 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13169 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13171 switch (TREE_CODE (*tp))
13177 *walk_subtrees = 0;
13179 /* ... fall through ... */
13186 /* Return whether the sub-tree ST contains a label which is accessible from
13187 outside the sub-tree. */
13190 contains_label_p (tree st)
13193 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13196 /* Fold a ternary expression of code CODE and type TYPE with operands
13197 OP0, OP1, and OP2. Return the folded expression if folding is
13198 successful. Otherwise, return NULL_TREE. */
13201 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13202 tree op0, tree op1, tree op2)
13205 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13206 enum tree_code_class kind = TREE_CODE_CLASS (code);
13208 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13209 && TREE_CODE_LENGTH (code) == 3);
13211 /* Strip any conversions that don't change the mode. This is safe
13212 for every expression, except for a comparison expression because
13213 its signedness is derived from its operands. So, in the latter
13214 case, only strip conversions that don't change the signedness.
13216 Note that this is done as an internal manipulation within the
13217 constant folder, in order to find the simplest representation of
13218 the arguments so that their form can be studied. In any cases,
13219 the appropriate type conversions should be put back in the tree
13220 that will get out of the constant folder. */
13241 case COMPONENT_REF:
13242 if (TREE_CODE (arg0) == CONSTRUCTOR
13243 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13245 unsigned HOST_WIDE_INT idx;
13247 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13254 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13255 so all simple results must be passed through pedantic_non_lvalue. */
13256 if (TREE_CODE (arg0) == INTEGER_CST)
13258 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13259 tem = integer_zerop (arg0) ? op2 : op1;
13260 /* Only optimize constant conditions when the selected branch
13261 has the same type as the COND_EXPR. This avoids optimizing
13262 away "c ? x : throw", where the throw has a void type.
13263 Avoid throwing away that operand which contains label. */
13264 if ((!TREE_SIDE_EFFECTS (unused_op)
13265 || !contains_label_p (unused_op))
13266 && (! VOID_TYPE_P (TREE_TYPE (tem))
13267 || VOID_TYPE_P (type)))
13268 return pedantic_non_lvalue_loc (loc, tem);
13271 if (operand_equal_p (arg1, op2, 0))
13272 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13274 /* If we have A op B ? A : C, we may be able to convert this to a
13275 simpler expression, depending on the operation and the values
13276 of B and C. Signed zeros prevent all of these transformations,
13277 for reasons given above each one.
13279 Also try swapping the arguments and inverting the conditional. */
13280 if (COMPARISON_CLASS_P (arg0)
13281 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13282 arg1, TREE_OPERAND (arg0, 1))
13283 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13285 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13290 if (COMPARISON_CLASS_P (arg0)
13291 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13293 TREE_OPERAND (arg0, 1))
13294 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13296 tem = fold_truth_not_expr (loc, arg0);
13297 if (tem && COMPARISON_CLASS_P (tem))
13299 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13305 /* If the second operand is simpler than the third, swap them
13306 since that produces better jump optimization results. */
13307 if (truth_value_p (TREE_CODE (arg0))
13308 && tree_swap_operands_p (op1, op2, false))
13310 /* See if this can be inverted. If it can't, possibly because
13311 it was a floating-point inequality comparison, don't do
13313 tem = fold_truth_not_expr (loc, arg0);
13315 return fold_build3_loc (loc, code, type, tem, op2, op1);
13318 /* Convert A ? 1 : 0 to simply A. */
13319 if (integer_onep (op1)
13320 && integer_zerop (op2)
13321 /* If we try to convert OP0 to our type, the
13322 call to fold will try to move the conversion inside
13323 a COND, which will recurse. In that case, the COND_EXPR
13324 is probably the best choice, so leave it alone. */
13325 && type == TREE_TYPE (arg0))
13326 return pedantic_non_lvalue_loc (loc, arg0);
13328 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13329 over COND_EXPR in cases such as floating point comparisons. */
13330 if (integer_zerop (op1)
13331 && integer_onep (op2)
13332 && truth_value_p (TREE_CODE (arg0)))
13333 return pedantic_non_lvalue_loc (loc,
13334 fold_convert_loc (loc, type,
13335 invert_truthvalue_loc (loc,
13338 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13339 if (TREE_CODE (arg0) == LT_EXPR
13340 && integer_zerop (TREE_OPERAND (arg0, 1))
13341 && integer_zerop (op2)
13342 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13344 /* sign_bit_p only checks ARG1 bits within A's precision.
13345 If <sign bit of A> has wider type than A, bits outside
13346 of A's precision in <sign bit of A> need to be checked.
13347 If they are all 0, this optimization needs to be done
13348 in unsigned A's type, if they are all 1 in signed A's type,
13349 otherwise this can't be done. */
13350 if (TYPE_PRECISION (TREE_TYPE (tem))
13351 < TYPE_PRECISION (TREE_TYPE (arg1))
13352 && TYPE_PRECISION (TREE_TYPE (tem))
13353 < TYPE_PRECISION (type))
13355 unsigned HOST_WIDE_INT mask_lo;
13356 HOST_WIDE_INT mask_hi;
13357 int inner_width, outer_width;
13360 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13361 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13362 if (outer_width > TYPE_PRECISION (type))
13363 outer_width = TYPE_PRECISION (type);
13365 if (outer_width > HOST_BITS_PER_WIDE_INT)
13367 mask_hi = ((unsigned HOST_WIDE_INT) -1
13368 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13374 mask_lo = ((unsigned HOST_WIDE_INT) -1
13375 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13377 if (inner_width > HOST_BITS_PER_WIDE_INT)
13379 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13380 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13384 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13385 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13387 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13388 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13390 tem_type = signed_type_for (TREE_TYPE (tem));
13391 tem = fold_convert_loc (loc, tem_type, tem);
13393 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13394 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13396 tem_type = unsigned_type_for (TREE_TYPE (tem));
13397 tem = fold_convert_loc (loc, tem_type, tem);
13405 fold_convert_loc (loc, type,
13406 fold_build2_loc (loc, BIT_AND_EXPR,
13407 TREE_TYPE (tem), tem,
13408 fold_convert_loc (loc,
13413 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13414 already handled above. */
13415 if (TREE_CODE (arg0) == BIT_AND_EXPR
13416 && integer_onep (TREE_OPERAND (arg0, 1))
13417 && integer_zerop (op2)
13418 && integer_pow2p (arg1))
13420 tree tem = TREE_OPERAND (arg0, 0);
13422 if (TREE_CODE (tem) == RSHIFT_EXPR
13423 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13424 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13425 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13426 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13427 TREE_OPERAND (tem, 0), arg1);
13430 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13431 is probably obsolete because the first operand should be a
13432 truth value (that's why we have the two cases above), but let's
13433 leave it in until we can confirm this for all front-ends. */
13434 if (integer_zerop (op2)
13435 && TREE_CODE (arg0) == NE_EXPR
13436 && integer_zerop (TREE_OPERAND (arg0, 1))
13437 && integer_pow2p (arg1)
13438 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13439 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13440 arg1, OEP_ONLY_CONST))
13441 return pedantic_non_lvalue_loc (loc,
13442 fold_convert_loc (loc, type,
13443 TREE_OPERAND (arg0, 0)));
13445 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13446 if (integer_zerop (op2)
13447 && truth_value_p (TREE_CODE (arg0))
13448 && truth_value_p (TREE_CODE (arg1)))
13449 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13450 fold_convert_loc (loc, type, arg0),
13453 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13454 if (integer_onep (op2)
13455 && truth_value_p (TREE_CODE (arg0))
13456 && truth_value_p (TREE_CODE (arg1)))
13458 /* Only perform transformation if ARG0 is easily inverted. */
13459 tem = fold_truth_not_expr (loc, arg0);
13461 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13462 fold_convert_loc (loc, type, tem),
13466 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13467 if (integer_zerop (arg1)
13468 && truth_value_p (TREE_CODE (arg0))
13469 && truth_value_p (TREE_CODE (op2)))
13471 /* Only perform transformation if ARG0 is easily inverted. */
13472 tem = fold_truth_not_expr (loc, arg0);
13474 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13475 fold_convert_loc (loc, type, tem),
13479 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13480 if (integer_onep (arg1)
13481 && truth_value_p (TREE_CODE (arg0))
13482 && truth_value_p (TREE_CODE (op2)))
13483 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13484 fold_convert_loc (loc, type, arg0),
13490 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13491 of fold_ternary on them. */
13492 gcc_unreachable ();
13494 case BIT_FIELD_REF:
13495 if ((TREE_CODE (arg0) == VECTOR_CST
13496 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13497 && type == TREE_TYPE (TREE_TYPE (arg0)))
13499 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13500 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13503 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13504 && (idx % width) == 0
13505 && (idx = idx / width)
13506 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13508 tree elements = NULL_TREE;
13510 if (TREE_CODE (arg0) == VECTOR_CST)
13511 elements = TREE_VECTOR_CST_ELTS (arg0);
13514 unsigned HOST_WIDE_INT idx;
13517 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13518 elements = tree_cons (NULL_TREE, value, elements);
13520 while (idx-- > 0 && elements)
13521 elements = TREE_CHAIN (elements);
13523 return TREE_VALUE (elements);
13525 return build_zero_cst (type);
13529 /* A bit-field-ref that referenced the full argument can be stripped. */
13530 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13531 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13532 && integer_zerop (op2))
13533 return fold_convert_loc (loc, type, arg0);
13538 /* For integers we can decompose the FMA if possible. */
13539 if (TREE_CODE (arg0) == INTEGER_CST
13540 && TREE_CODE (arg1) == INTEGER_CST)
13541 return fold_build2_loc (loc, PLUS_EXPR, type,
13542 const_binop (MULT_EXPR, arg0, arg1), arg2);
13543 if (integer_zerop (arg2))
13544 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13546 return fold_fma (loc, type, arg0, arg1, arg2);
13550 } /* switch (code) */
13553 /* Perform constant folding and related simplification of EXPR.
13554 The related simplifications include x*1 => x, x*0 => 0, etc.,
13555 and application of the associative law.
13556 NOP_EXPR conversions may be removed freely (as long as we
13557 are careful not to change the type of the overall expression).
13558 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13559 but we can constant-fold them if they have constant operands. */
13561 #ifdef ENABLE_FOLD_CHECKING
13562 # define fold(x) fold_1 (x)
13563 static tree fold_1 (tree);
13569 const tree t = expr;
13570 enum tree_code code = TREE_CODE (t);
13571 enum tree_code_class kind = TREE_CODE_CLASS (code);
13573 location_t loc = EXPR_LOCATION (expr);
13575 /* Return right away if a constant. */
13576 if (kind == tcc_constant)
13579 /* CALL_EXPR-like objects with variable numbers of operands are
13580 treated specially. */
13581 if (kind == tcc_vl_exp)
13583 if (code == CALL_EXPR)
13585 tem = fold_call_expr (loc, expr, false);
13586 return tem ? tem : expr;
13591 if (IS_EXPR_CODE_CLASS (kind))
13593 tree type = TREE_TYPE (t);
13594 tree op0, op1, op2;
13596 switch (TREE_CODE_LENGTH (code))
13599 op0 = TREE_OPERAND (t, 0);
13600 tem = fold_unary_loc (loc, code, type, op0);
13601 return tem ? tem : expr;
13603 op0 = TREE_OPERAND (t, 0);
13604 op1 = TREE_OPERAND (t, 1);
13605 tem = fold_binary_loc (loc, code, type, op0, op1);
13606 return tem ? tem : expr;
13608 op0 = TREE_OPERAND (t, 0);
13609 op1 = TREE_OPERAND (t, 1);
13610 op2 = TREE_OPERAND (t, 2);
13611 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13612 return tem ? tem : expr;
13622 tree op0 = TREE_OPERAND (t, 0);
13623 tree op1 = TREE_OPERAND (t, 1);
13625 if (TREE_CODE (op1) == INTEGER_CST
13626 && TREE_CODE (op0) == CONSTRUCTOR
13627 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13629 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13630 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13631 unsigned HOST_WIDE_INT begin = 0;
13633 /* Find a matching index by means of a binary search. */
13634 while (begin != end)
13636 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13637 tree index = VEC_index (constructor_elt, elts, middle)->index;
13639 if (TREE_CODE (index) == INTEGER_CST
13640 && tree_int_cst_lt (index, op1))
13641 begin = middle + 1;
13642 else if (TREE_CODE (index) == INTEGER_CST
13643 && tree_int_cst_lt (op1, index))
13645 else if (TREE_CODE (index) == RANGE_EXPR
13646 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13647 begin = middle + 1;
13648 else if (TREE_CODE (index) == RANGE_EXPR
13649 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13652 return VEC_index (constructor_elt, elts, middle)->value;
13660 return fold (DECL_INITIAL (t));
13664 } /* switch (code) */
13667 #ifdef ENABLE_FOLD_CHECKING
13670 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13671 static void fold_check_failed (const_tree, const_tree);
13672 void print_fold_checksum (const_tree);
13674 /* When --enable-checking=fold, compute a digest of expr before
13675 and after actual fold call to see if fold did not accidentally
13676 change original expr. */
13682 struct md5_ctx ctx;
13683 unsigned char checksum_before[16], checksum_after[16];
13686 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13687 md5_init_ctx (&ctx);
13688 fold_checksum_tree (expr, &ctx, ht);
13689 md5_finish_ctx (&ctx, checksum_before);
13692 ret = fold_1 (expr);
13694 md5_init_ctx (&ctx);
13695 fold_checksum_tree (expr, &ctx, ht);
13696 md5_finish_ctx (&ctx, checksum_after);
13699 if (memcmp (checksum_before, checksum_after, 16))
13700 fold_check_failed (expr, ret);
13706 print_fold_checksum (const_tree expr)
13708 struct md5_ctx ctx;
13709 unsigned char checksum[16], cnt;
13712 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13713 md5_init_ctx (&ctx);
13714 fold_checksum_tree (expr, &ctx, ht);
13715 md5_finish_ctx (&ctx, checksum);
13717 for (cnt = 0; cnt < 16; ++cnt)
13718 fprintf (stderr, "%02x", checksum[cnt]);
13719 putc ('\n', stderr);
13723 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13725 internal_error ("fold check: original tree changed by fold");
13729 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13732 enum tree_code code;
13733 union tree_node buf;
13738 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13739 <= sizeof (struct tree_function_decl))
13740 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13743 slot = (void **) htab_find_slot (ht, expr, INSERT);
13746 *slot = CONST_CAST_TREE (expr);
13747 code = TREE_CODE (expr);
13748 if (TREE_CODE_CLASS (code) == tcc_declaration
13749 && DECL_ASSEMBLER_NAME_SET_P (expr))
13751 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13752 memcpy ((char *) &buf, expr, tree_size (expr));
13753 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13754 expr = (tree) &buf;
13756 else if (TREE_CODE_CLASS (code) == tcc_type
13757 && (TYPE_POINTER_TO (expr)
13758 || TYPE_REFERENCE_TO (expr)
13759 || TYPE_CACHED_VALUES_P (expr)
13760 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13761 || TYPE_NEXT_VARIANT (expr)))
13763 /* Allow these fields to be modified. */
13765 memcpy ((char *) &buf, expr, tree_size (expr));
13766 expr = tmp = (tree) &buf;
13767 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13768 TYPE_POINTER_TO (tmp) = NULL;
13769 TYPE_REFERENCE_TO (tmp) = NULL;
13770 TYPE_NEXT_VARIANT (tmp) = NULL;
13771 if (TYPE_CACHED_VALUES_P (tmp))
13773 TYPE_CACHED_VALUES_P (tmp) = 0;
13774 TYPE_CACHED_VALUES (tmp) = NULL;
13777 md5_process_bytes (expr, tree_size (expr), ctx);
13778 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13779 if (TREE_CODE_CLASS (code) != tcc_type
13780 && TREE_CODE_CLASS (code) != tcc_declaration
13781 && code != TREE_LIST
13782 && code != SSA_NAME)
13783 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13784 switch (TREE_CODE_CLASS (code))
13790 md5_process_bytes (TREE_STRING_POINTER (expr),
13791 TREE_STRING_LENGTH (expr), ctx);
13794 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13795 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13798 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13804 case tcc_exceptional:
13808 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13809 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13810 expr = TREE_CHAIN (expr);
13811 goto recursive_label;
13814 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13815 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13821 case tcc_expression:
13822 case tcc_reference:
13823 case tcc_comparison:
13826 case tcc_statement:
13828 len = TREE_OPERAND_LENGTH (expr);
13829 for (i = 0; i < len; ++i)
13830 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13832 case tcc_declaration:
13833 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13834 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13835 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13837 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13838 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13839 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13840 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13841 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13843 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13844 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13846 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13848 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13849 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13850 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13854 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13855 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13856 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13857 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13858 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13859 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13860 if (INTEGRAL_TYPE_P (expr)
13861 || SCALAR_FLOAT_TYPE_P (expr))
13863 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13864 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13866 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13867 if (TREE_CODE (expr) == RECORD_TYPE
13868 || TREE_CODE (expr) == UNION_TYPE
13869 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13870 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13871 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13878 /* Helper function for outputting the checksum of a tree T. When
13879 debugging with gdb, you can "define mynext" to be "next" followed
13880 by "call debug_fold_checksum (op0)", then just trace down till the
13883 DEBUG_FUNCTION void
13884 debug_fold_checksum (const_tree t)
13887 unsigned char checksum[16];
13888 struct md5_ctx ctx;
13889 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13891 md5_init_ctx (&ctx);
13892 fold_checksum_tree (t, &ctx, ht);
13893 md5_finish_ctx (&ctx, checksum);
13896 for (i = 0; i < 16; i++)
13897 fprintf (stderr, "%d ", checksum[i]);
13899 fprintf (stderr, "\n");
13904 /* Fold a unary tree expression with code CODE of type TYPE with an
13905 operand OP0. LOC is the location of the resulting expression.
13906 Return a folded expression if successful. Otherwise, return a tree
13907 expression with code CODE of type TYPE with an operand OP0. */
13910 fold_build1_stat_loc (location_t loc,
13911 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13914 #ifdef ENABLE_FOLD_CHECKING
13915 unsigned char checksum_before[16], checksum_after[16];
13916 struct md5_ctx ctx;
13919 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13920 md5_init_ctx (&ctx);
13921 fold_checksum_tree (op0, &ctx, ht);
13922 md5_finish_ctx (&ctx, checksum_before);
13926 tem = fold_unary_loc (loc, code, type, op0);
13928 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
13930 #ifdef ENABLE_FOLD_CHECKING
13931 md5_init_ctx (&ctx);
13932 fold_checksum_tree (op0, &ctx, ht);
13933 md5_finish_ctx (&ctx, checksum_after);
13936 if (memcmp (checksum_before, checksum_after, 16))
13937 fold_check_failed (op0, tem);
13942 /* Fold a binary tree expression with code CODE of type TYPE with
13943 operands OP0 and OP1. LOC is the location of the resulting
13944 expression. Return a folded expression if successful. Otherwise,
13945 return a tree expression with code CODE of type TYPE with operands
13949 fold_build2_stat_loc (location_t loc,
13950 enum tree_code code, tree type, tree op0, tree op1
13954 #ifdef ENABLE_FOLD_CHECKING
13955 unsigned char checksum_before_op0[16],
13956 checksum_before_op1[16],
13957 checksum_after_op0[16],
13958 checksum_after_op1[16];
13959 struct md5_ctx ctx;
13962 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13963 md5_init_ctx (&ctx);
13964 fold_checksum_tree (op0, &ctx, ht);
13965 md5_finish_ctx (&ctx, checksum_before_op0);
13968 md5_init_ctx (&ctx);
13969 fold_checksum_tree (op1, &ctx, ht);
13970 md5_finish_ctx (&ctx, checksum_before_op1);
13974 tem = fold_binary_loc (loc, code, type, op0, op1);
13976 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13978 #ifdef ENABLE_FOLD_CHECKING
13979 md5_init_ctx (&ctx);
13980 fold_checksum_tree (op0, &ctx, ht);
13981 md5_finish_ctx (&ctx, checksum_after_op0);
13984 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13985 fold_check_failed (op0, tem);
13987 md5_init_ctx (&ctx);
13988 fold_checksum_tree (op1, &ctx, ht);
13989 md5_finish_ctx (&ctx, checksum_after_op1);
13992 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13993 fold_check_failed (op1, tem);
13998 /* Fold a ternary tree expression with code CODE of type TYPE with
13999 operands OP0, OP1, and OP2. Return a folded expression if
14000 successful. Otherwise, return a tree expression with code CODE of
14001 type TYPE with operands OP0, OP1, and OP2. */
14004 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14005 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14008 #ifdef ENABLE_FOLD_CHECKING
14009 unsigned char checksum_before_op0[16],
14010 checksum_before_op1[16],
14011 checksum_before_op2[16],
14012 checksum_after_op0[16],
14013 checksum_after_op1[16],
14014 checksum_after_op2[16];
14015 struct md5_ctx ctx;
14018 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14019 md5_init_ctx (&ctx);
14020 fold_checksum_tree (op0, &ctx, ht);
14021 md5_finish_ctx (&ctx, checksum_before_op0);
14024 md5_init_ctx (&ctx);
14025 fold_checksum_tree (op1, &ctx, ht);
14026 md5_finish_ctx (&ctx, checksum_before_op1);
14029 md5_init_ctx (&ctx);
14030 fold_checksum_tree (op2, &ctx, ht);
14031 md5_finish_ctx (&ctx, checksum_before_op2);
14035 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14036 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14038 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14040 #ifdef ENABLE_FOLD_CHECKING
14041 md5_init_ctx (&ctx);
14042 fold_checksum_tree (op0, &ctx, ht);
14043 md5_finish_ctx (&ctx, checksum_after_op0);
14046 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14047 fold_check_failed (op0, tem);
14049 md5_init_ctx (&ctx);
14050 fold_checksum_tree (op1, &ctx, ht);
14051 md5_finish_ctx (&ctx, checksum_after_op1);
14054 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14055 fold_check_failed (op1, tem);
14057 md5_init_ctx (&ctx);
14058 fold_checksum_tree (op2, &ctx, ht);
14059 md5_finish_ctx (&ctx, checksum_after_op2);
14062 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14063 fold_check_failed (op2, tem);
14068 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14069 arguments in ARGARRAY, and a null static chain.
14070 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14071 of type TYPE from the given operands as constructed by build_call_array. */
14074 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14075 int nargs, tree *argarray)
14078 #ifdef ENABLE_FOLD_CHECKING
14079 unsigned char checksum_before_fn[16],
14080 checksum_before_arglist[16],
14081 checksum_after_fn[16],
14082 checksum_after_arglist[16];
14083 struct md5_ctx ctx;
14087 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14088 md5_init_ctx (&ctx);
14089 fold_checksum_tree (fn, &ctx, ht);
14090 md5_finish_ctx (&ctx, checksum_before_fn);
14093 md5_init_ctx (&ctx);
14094 for (i = 0; i < nargs; i++)
14095 fold_checksum_tree (argarray[i], &ctx, ht);
14096 md5_finish_ctx (&ctx, checksum_before_arglist);
14100 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14102 #ifdef ENABLE_FOLD_CHECKING
14103 md5_init_ctx (&ctx);
14104 fold_checksum_tree (fn, &ctx, ht);
14105 md5_finish_ctx (&ctx, checksum_after_fn);
14108 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14109 fold_check_failed (fn, tem);
14111 md5_init_ctx (&ctx);
14112 for (i = 0; i < nargs; i++)
14113 fold_checksum_tree (argarray[i], &ctx, ht);
14114 md5_finish_ctx (&ctx, checksum_after_arglist);
14117 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14118 fold_check_failed (NULL_TREE, tem);
14123 /* Perform constant folding and related simplification of initializer
14124 expression EXPR. These behave identically to "fold_buildN" but ignore
14125 potential run-time traps and exceptions that fold must preserve. */
14127 #define START_FOLD_INIT \
14128 int saved_signaling_nans = flag_signaling_nans;\
14129 int saved_trapping_math = flag_trapping_math;\
14130 int saved_rounding_math = flag_rounding_math;\
14131 int saved_trapv = flag_trapv;\
14132 int saved_folding_initializer = folding_initializer;\
14133 flag_signaling_nans = 0;\
14134 flag_trapping_math = 0;\
14135 flag_rounding_math = 0;\
14137 folding_initializer = 1;
14139 #define END_FOLD_INIT \
14140 flag_signaling_nans = saved_signaling_nans;\
14141 flag_trapping_math = saved_trapping_math;\
14142 flag_rounding_math = saved_rounding_math;\
14143 flag_trapv = saved_trapv;\
14144 folding_initializer = saved_folding_initializer;
14147 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14148 tree type, tree op)
14153 result = fold_build1_loc (loc, code, type, op);
14160 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14161 tree type, tree op0, tree op1)
14166 result = fold_build2_loc (loc, code, type, op0, op1);
14173 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14174 tree type, tree op0, tree op1, tree op2)
14179 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14186 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14187 int nargs, tree *argarray)
14192 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14198 #undef START_FOLD_INIT
14199 #undef END_FOLD_INIT
14201 /* Determine if first argument is a multiple of second argument. Return 0 if
14202 it is not, or we cannot easily determined it to be.
14204 An example of the sort of thing we care about (at this point; this routine
14205 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14206 fold cases do now) is discovering that
14208 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14214 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14216 This code also handles discovering that
14218 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14220 is a multiple of 8 so we don't have to worry about dealing with a
14221 possible remainder.
14223 Note that we *look* inside a SAVE_EXPR only to determine how it was
14224 calculated; it is not safe for fold to do much of anything else with the
14225 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14226 at run time. For example, the latter example above *cannot* be implemented
14227 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14228 evaluation time of the original SAVE_EXPR is not necessarily the same at
14229 the time the new expression is evaluated. The only optimization of this
14230 sort that would be valid is changing
14232 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14236 SAVE_EXPR (I) * SAVE_EXPR (J)
14238 (where the same SAVE_EXPR (J) is used in the original and the
14239 transformed version). */
14242 multiple_of_p (tree type, const_tree top, const_tree bottom)
14244 if (operand_equal_p (top, bottom, 0))
14247 if (TREE_CODE (type) != INTEGER_TYPE)
14250 switch (TREE_CODE (top))
14253 /* Bitwise and provides a power of two multiple. If the mask is
14254 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14255 if (!integer_pow2p (bottom))
14260 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14261 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14265 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14266 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14269 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14273 op1 = TREE_OPERAND (top, 1);
14274 /* const_binop may not detect overflow correctly,
14275 so check for it explicitly here. */
14276 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14277 > TREE_INT_CST_LOW (op1)
14278 && TREE_INT_CST_HIGH (op1) == 0
14279 && 0 != (t1 = fold_convert (type,
14280 const_binop (LSHIFT_EXPR,
14283 && !TREE_OVERFLOW (t1))
14284 return multiple_of_p (type, t1, bottom);
14289 /* Can't handle conversions from non-integral or wider integral type. */
14290 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14291 || (TYPE_PRECISION (type)
14292 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14295 /* .. fall through ... */
14298 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14301 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14302 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14305 if (TREE_CODE (bottom) != INTEGER_CST
14306 || integer_zerop (bottom)
14307 || (TYPE_UNSIGNED (type)
14308 && (tree_int_cst_sgn (top) < 0
14309 || tree_int_cst_sgn (bottom) < 0)))
14311 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14319 /* Return true if CODE or TYPE is known to be non-negative. */
14322 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14324 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14325 && truth_value_p (code))
14326 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14327 have a signed:1 type (where the value is -1 and 0). */
14332 /* Return true if (CODE OP0) is known to be non-negative. If the return
14333 value is based on the assumption that signed overflow is undefined,
14334 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14335 *STRICT_OVERFLOW_P. */
14338 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14339 bool *strict_overflow_p)
14341 if (TYPE_UNSIGNED (type))
14347 /* We can't return 1 if flag_wrapv is set because
14348 ABS_EXPR<INT_MIN> = INT_MIN. */
14349 if (!INTEGRAL_TYPE_P (type))
14351 if (TYPE_OVERFLOW_UNDEFINED (type))
14353 *strict_overflow_p = true;
14358 case NON_LVALUE_EXPR:
14360 case FIX_TRUNC_EXPR:
14361 return tree_expr_nonnegative_warnv_p (op0,
14362 strict_overflow_p);
14366 tree inner_type = TREE_TYPE (op0);
14367 tree outer_type = type;
14369 if (TREE_CODE (outer_type) == REAL_TYPE)
14371 if (TREE_CODE (inner_type) == REAL_TYPE)
14372 return tree_expr_nonnegative_warnv_p (op0,
14373 strict_overflow_p);
14374 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14376 if (TYPE_UNSIGNED (inner_type))
14378 return tree_expr_nonnegative_warnv_p (op0,
14379 strict_overflow_p);
14382 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14384 if (TREE_CODE (inner_type) == REAL_TYPE)
14385 return tree_expr_nonnegative_warnv_p (op0,
14386 strict_overflow_p);
14387 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14388 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14389 && TYPE_UNSIGNED (inner_type);
14395 return tree_simple_nonnegative_warnv_p (code, type);
14398 /* We don't know sign of `t', so be conservative and return false. */
14402 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14403 value is based on the assumption that signed overflow is undefined,
14404 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14405 *STRICT_OVERFLOW_P. */
14408 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14409 tree op1, bool *strict_overflow_p)
14411 if (TYPE_UNSIGNED (type))
14416 case POINTER_PLUS_EXPR:
14418 if (FLOAT_TYPE_P (type))
14419 return (tree_expr_nonnegative_warnv_p (op0,
14421 && tree_expr_nonnegative_warnv_p (op1,
14422 strict_overflow_p));
14424 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14425 both unsigned and at least 2 bits shorter than the result. */
14426 if (TREE_CODE (type) == INTEGER_TYPE
14427 && TREE_CODE (op0) == NOP_EXPR
14428 && TREE_CODE (op1) == NOP_EXPR)
14430 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14431 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14432 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14433 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14435 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14436 TYPE_PRECISION (inner2)) + 1;
14437 return prec < TYPE_PRECISION (type);
14443 if (FLOAT_TYPE_P (type))
14445 /* x * x for floating point x is always non-negative. */
14446 if (operand_equal_p (op0, op1, 0))
14448 return (tree_expr_nonnegative_warnv_p (op0,
14450 && tree_expr_nonnegative_warnv_p (op1,
14451 strict_overflow_p));
14454 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14455 both unsigned and their total bits is shorter than the result. */
14456 if (TREE_CODE (type) == INTEGER_TYPE
14457 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14458 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14460 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14461 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14463 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14464 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14467 bool unsigned0 = TYPE_UNSIGNED (inner0);
14468 bool unsigned1 = TYPE_UNSIGNED (inner1);
14470 if (TREE_CODE (op0) == INTEGER_CST)
14471 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14473 if (TREE_CODE (op1) == INTEGER_CST)
14474 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14476 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14477 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14479 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14480 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14481 : TYPE_PRECISION (inner0);
14483 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14484 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14485 : TYPE_PRECISION (inner1);
14487 return precision0 + precision1 < TYPE_PRECISION (type);
14494 return (tree_expr_nonnegative_warnv_p (op0,
14496 || tree_expr_nonnegative_warnv_p (op1,
14497 strict_overflow_p));
14503 case TRUNC_DIV_EXPR:
14504 case CEIL_DIV_EXPR:
14505 case FLOOR_DIV_EXPR:
14506 case ROUND_DIV_EXPR:
14507 return (tree_expr_nonnegative_warnv_p (op0,
14509 && tree_expr_nonnegative_warnv_p (op1,
14510 strict_overflow_p));
14512 case TRUNC_MOD_EXPR:
14513 case CEIL_MOD_EXPR:
14514 case FLOOR_MOD_EXPR:
14515 case ROUND_MOD_EXPR:
14516 return tree_expr_nonnegative_warnv_p (op0,
14517 strict_overflow_p);
14519 return tree_simple_nonnegative_warnv_p (code, type);
14522 /* We don't know sign of `t', so be conservative and return false. */
14526 /* Return true if T is known to be non-negative. If the return
14527 value is based on the assumption that signed overflow is undefined,
14528 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14529 *STRICT_OVERFLOW_P. */
14532 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14534 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14537 switch (TREE_CODE (t))
14540 return tree_int_cst_sgn (t) >= 0;
14543 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14546 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14549 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14551 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14552 strict_overflow_p));
14554 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14557 /* We don't know sign of `t', so be conservative and return false. */
14561 /* Return true if T is known to be non-negative. If the return
14562 value is based on the assumption that signed overflow is undefined,
14563 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14564 *STRICT_OVERFLOW_P. */
14567 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14568 tree arg0, tree arg1, bool *strict_overflow_p)
14570 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14571 switch (DECL_FUNCTION_CODE (fndecl))
14573 CASE_FLT_FN (BUILT_IN_ACOS):
14574 CASE_FLT_FN (BUILT_IN_ACOSH):
14575 CASE_FLT_FN (BUILT_IN_CABS):
14576 CASE_FLT_FN (BUILT_IN_COSH):
14577 CASE_FLT_FN (BUILT_IN_ERFC):
14578 CASE_FLT_FN (BUILT_IN_EXP):
14579 CASE_FLT_FN (BUILT_IN_EXP10):
14580 CASE_FLT_FN (BUILT_IN_EXP2):
14581 CASE_FLT_FN (BUILT_IN_FABS):
14582 CASE_FLT_FN (BUILT_IN_FDIM):
14583 CASE_FLT_FN (BUILT_IN_HYPOT):
14584 CASE_FLT_FN (BUILT_IN_POW10):
14585 CASE_INT_FN (BUILT_IN_FFS):
14586 CASE_INT_FN (BUILT_IN_PARITY):
14587 CASE_INT_FN (BUILT_IN_POPCOUNT):
14588 case BUILT_IN_BSWAP32:
14589 case BUILT_IN_BSWAP64:
14593 CASE_FLT_FN (BUILT_IN_SQRT):
14594 /* sqrt(-0.0) is -0.0. */
14595 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14597 return tree_expr_nonnegative_warnv_p (arg0,
14598 strict_overflow_p);
14600 CASE_FLT_FN (BUILT_IN_ASINH):
14601 CASE_FLT_FN (BUILT_IN_ATAN):
14602 CASE_FLT_FN (BUILT_IN_ATANH):
14603 CASE_FLT_FN (BUILT_IN_CBRT):
14604 CASE_FLT_FN (BUILT_IN_CEIL):
14605 CASE_FLT_FN (BUILT_IN_ERF):
14606 CASE_FLT_FN (BUILT_IN_EXPM1):
14607 CASE_FLT_FN (BUILT_IN_FLOOR):
14608 CASE_FLT_FN (BUILT_IN_FMOD):
14609 CASE_FLT_FN (BUILT_IN_FREXP):
14610 CASE_FLT_FN (BUILT_IN_LCEIL):
14611 CASE_FLT_FN (BUILT_IN_LDEXP):
14612 CASE_FLT_FN (BUILT_IN_LFLOOR):
14613 CASE_FLT_FN (BUILT_IN_LLCEIL):
14614 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14615 CASE_FLT_FN (BUILT_IN_LLRINT):
14616 CASE_FLT_FN (BUILT_IN_LLROUND):
14617 CASE_FLT_FN (BUILT_IN_LRINT):
14618 CASE_FLT_FN (BUILT_IN_LROUND):
14619 CASE_FLT_FN (BUILT_IN_MODF):
14620 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14621 CASE_FLT_FN (BUILT_IN_RINT):
14622 CASE_FLT_FN (BUILT_IN_ROUND):
14623 CASE_FLT_FN (BUILT_IN_SCALB):
14624 CASE_FLT_FN (BUILT_IN_SCALBLN):
14625 CASE_FLT_FN (BUILT_IN_SCALBN):
14626 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14627 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14628 CASE_FLT_FN (BUILT_IN_SINH):
14629 CASE_FLT_FN (BUILT_IN_TANH):
14630 CASE_FLT_FN (BUILT_IN_TRUNC):
14631 /* True if the 1st argument is nonnegative. */
14632 return tree_expr_nonnegative_warnv_p (arg0,
14633 strict_overflow_p);
14635 CASE_FLT_FN (BUILT_IN_FMAX):
14636 /* True if the 1st OR 2nd arguments are nonnegative. */
14637 return (tree_expr_nonnegative_warnv_p (arg0,
14639 || (tree_expr_nonnegative_warnv_p (arg1,
14640 strict_overflow_p)));
14642 CASE_FLT_FN (BUILT_IN_FMIN):
14643 /* True if the 1st AND 2nd arguments are nonnegative. */
14644 return (tree_expr_nonnegative_warnv_p (arg0,
14646 && (tree_expr_nonnegative_warnv_p (arg1,
14647 strict_overflow_p)));
14649 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14650 /* True if the 2nd argument is nonnegative. */
14651 return tree_expr_nonnegative_warnv_p (arg1,
14652 strict_overflow_p);
14654 CASE_FLT_FN (BUILT_IN_POWI):
14655 /* True if the 1st argument is nonnegative or the second
14656 argument is an even integer. */
14657 if (TREE_CODE (arg1) == INTEGER_CST
14658 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14660 return tree_expr_nonnegative_warnv_p (arg0,
14661 strict_overflow_p);
14663 CASE_FLT_FN (BUILT_IN_POW):
14664 /* True if the 1st argument is nonnegative or the second
14665 argument is an even integer valued real. */
14666 if (TREE_CODE (arg1) == REAL_CST)
14671 c = TREE_REAL_CST (arg1);
14672 n = real_to_integer (&c);
14675 REAL_VALUE_TYPE cint;
14676 real_from_integer (&cint, VOIDmode, n,
14677 n < 0 ? -1 : 0, 0);
14678 if (real_identical (&c, &cint))
14682 return tree_expr_nonnegative_warnv_p (arg0,
14683 strict_overflow_p);
14688 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14692 /* Return true if T is known to be non-negative. If the return
14693 value is based on the assumption that signed overflow is undefined,
14694 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14695 *STRICT_OVERFLOW_P. */
14698 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14700 enum tree_code code = TREE_CODE (t);
14701 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14708 tree temp = TARGET_EXPR_SLOT (t);
14709 t = TARGET_EXPR_INITIAL (t);
14711 /* If the initializer is non-void, then it's a normal expression
14712 that will be assigned to the slot. */
14713 if (!VOID_TYPE_P (t))
14714 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14716 /* Otherwise, the initializer sets the slot in some way. One common
14717 way is an assignment statement at the end of the initializer. */
14720 if (TREE_CODE (t) == BIND_EXPR)
14721 t = expr_last (BIND_EXPR_BODY (t));
14722 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14723 || TREE_CODE (t) == TRY_CATCH_EXPR)
14724 t = expr_last (TREE_OPERAND (t, 0));
14725 else if (TREE_CODE (t) == STATEMENT_LIST)
14730 if (TREE_CODE (t) == MODIFY_EXPR
14731 && TREE_OPERAND (t, 0) == temp)
14732 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14733 strict_overflow_p);
14740 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14741 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14743 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14744 get_callee_fndecl (t),
14747 strict_overflow_p);
14749 case COMPOUND_EXPR:
14751 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14752 strict_overflow_p);
14754 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14755 strict_overflow_p);
14757 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14758 strict_overflow_p);
14761 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14765 /* We don't know sign of `t', so be conservative and return false. */
14769 /* Return true if T is known to be non-negative. If the return
14770 value is based on the assumption that signed overflow is undefined,
14771 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14772 *STRICT_OVERFLOW_P. */
14775 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14777 enum tree_code code;
14778 if (t == error_mark_node)
14781 code = TREE_CODE (t);
14782 switch (TREE_CODE_CLASS (code))
14785 case tcc_comparison:
14786 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14788 TREE_OPERAND (t, 0),
14789 TREE_OPERAND (t, 1),
14790 strict_overflow_p);
14793 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14795 TREE_OPERAND (t, 0),
14796 strict_overflow_p);
14799 case tcc_declaration:
14800 case tcc_reference:
14801 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14809 case TRUTH_AND_EXPR:
14810 case TRUTH_OR_EXPR:
14811 case TRUTH_XOR_EXPR:
14812 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14814 TREE_OPERAND (t, 0),
14815 TREE_OPERAND (t, 1),
14816 strict_overflow_p);
14817 case TRUTH_NOT_EXPR:
14818 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14820 TREE_OPERAND (t, 0),
14821 strict_overflow_p);
14828 case WITH_SIZE_EXPR:
14830 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14833 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14837 /* Return true if `t' is known to be non-negative. Handle warnings
14838 about undefined signed overflow. */
14841 tree_expr_nonnegative_p (tree t)
14843 bool ret, strict_overflow_p;
14845 strict_overflow_p = false;
14846 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14847 if (strict_overflow_p)
14848 fold_overflow_warning (("assuming signed overflow does not occur when "
14849 "determining that expression is always "
14851 WARN_STRICT_OVERFLOW_MISC);
14856 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14857 For floating point we further ensure that T is not denormal.
14858 Similar logic is present in nonzero_address in rtlanal.h.
14860 If the return value is based on the assumption that signed overflow
14861 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14862 change *STRICT_OVERFLOW_P. */
14865 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14866 bool *strict_overflow_p)
14871 return tree_expr_nonzero_warnv_p (op0,
14872 strict_overflow_p);
14876 tree inner_type = TREE_TYPE (op0);
14877 tree outer_type = type;
14879 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14880 && tree_expr_nonzero_warnv_p (op0,
14881 strict_overflow_p));
14885 case NON_LVALUE_EXPR:
14886 return tree_expr_nonzero_warnv_p (op0,
14887 strict_overflow_p);
14896 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14897 For floating point we further ensure that T is not denormal.
14898 Similar logic is present in nonzero_address in rtlanal.h.
14900 If the return value is based on the assumption that signed overflow
14901 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14902 change *STRICT_OVERFLOW_P. */
14905 tree_binary_nonzero_warnv_p (enum tree_code code,
14908 tree op1, bool *strict_overflow_p)
14910 bool sub_strict_overflow_p;
14913 case POINTER_PLUS_EXPR:
14915 if (TYPE_OVERFLOW_UNDEFINED (type))
14917 /* With the presence of negative values it is hard
14918 to say something. */
14919 sub_strict_overflow_p = false;
14920 if (!tree_expr_nonnegative_warnv_p (op0,
14921 &sub_strict_overflow_p)
14922 || !tree_expr_nonnegative_warnv_p (op1,
14923 &sub_strict_overflow_p))
14925 /* One of operands must be positive and the other non-negative. */
14926 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14927 overflows, on a twos-complement machine the sum of two
14928 nonnegative numbers can never be zero. */
14929 return (tree_expr_nonzero_warnv_p (op0,
14931 || tree_expr_nonzero_warnv_p (op1,
14932 strict_overflow_p));
14937 if (TYPE_OVERFLOW_UNDEFINED (type))
14939 if (tree_expr_nonzero_warnv_p (op0,
14941 && tree_expr_nonzero_warnv_p (op1,
14942 strict_overflow_p))
14944 *strict_overflow_p = true;
14951 sub_strict_overflow_p = false;
14952 if (tree_expr_nonzero_warnv_p (op0,
14953 &sub_strict_overflow_p)
14954 && tree_expr_nonzero_warnv_p (op1,
14955 &sub_strict_overflow_p))
14957 if (sub_strict_overflow_p)
14958 *strict_overflow_p = true;
14963 sub_strict_overflow_p = false;
14964 if (tree_expr_nonzero_warnv_p (op0,
14965 &sub_strict_overflow_p))
14967 if (sub_strict_overflow_p)
14968 *strict_overflow_p = true;
14970 /* When both operands are nonzero, then MAX must be too. */
14971 if (tree_expr_nonzero_warnv_p (op1,
14972 strict_overflow_p))
14975 /* MAX where operand 0 is positive is positive. */
14976 return tree_expr_nonnegative_warnv_p (op0,
14977 strict_overflow_p);
14979 /* MAX where operand 1 is positive is positive. */
14980 else if (tree_expr_nonzero_warnv_p (op1,
14981 &sub_strict_overflow_p)
14982 && tree_expr_nonnegative_warnv_p (op1,
14983 &sub_strict_overflow_p))
14985 if (sub_strict_overflow_p)
14986 *strict_overflow_p = true;
14992 return (tree_expr_nonzero_warnv_p (op1,
14994 || tree_expr_nonzero_warnv_p (op0,
14995 strict_overflow_p));
15004 /* Return true when T is an address and is known to be nonzero.
15005 For floating point we further ensure that T is not denormal.
15006 Similar logic is present in nonzero_address in rtlanal.h.
15008 If the return value is based on the assumption that signed overflow
15009 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15010 change *STRICT_OVERFLOW_P. */
15013 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15015 bool sub_strict_overflow_p;
15016 switch (TREE_CODE (t))
15019 return !integer_zerop (t);
15023 tree base = TREE_OPERAND (t, 0);
15024 if (!DECL_P (base))
15025 base = get_base_address (base);
15030 /* Weak declarations may link to NULL. Other things may also be NULL
15031 so protect with -fdelete-null-pointer-checks; but not variables
15032 allocated on the stack. */
15034 && (flag_delete_null_pointer_checks
15035 || (DECL_CONTEXT (base)
15036 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15037 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15038 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15040 /* Constants are never weak. */
15041 if (CONSTANT_CLASS_P (base))
15048 sub_strict_overflow_p = false;
15049 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15050 &sub_strict_overflow_p)
15051 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15052 &sub_strict_overflow_p))
15054 if (sub_strict_overflow_p)
15055 *strict_overflow_p = true;
15066 /* Return true when T is an address and is known to be nonzero.
15067 For floating point we further ensure that T is not denormal.
15068 Similar logic is present in nonzero_address in rtlanal.h.
15070 If the return value is based on the assumption that signed overflow
15071 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15072 change *STRICT_OVERFLOW_P. */
15075 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15077 tree type = TREE_TYPE (t);
15078 enum tree_code code;
15080 /* Doing something useful for floating point would need more work. */
15081 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15084 code = TREE_CODE (t);
15085 switch (TREE_CODE_CLASS (code))
15088 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15089 strict_overflow_p);
15091 case tcc_comparison:
15092 return tree_binary_nonzero_warnv_p (code, type,
15093 TREE_OPERAND (t, 0),
15094 TREE_OPERAND (t, 1),
15095 strict_overflow_p);
15097 case tcc_declaration:
15098 case tcc_reference:
15099 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15107 case TRUTH_NOT_EXPR:
15108 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15109 strict_overflow_p);
15111 case TRUTH_AND_EXPR:
15112 case TRUTH_OR_EXPR:
15113 case TRUTH_XOR_EXPR:
15114 return tree_binary_nonzero_warnv_p (code, type,
15115 TREE_OPERAND (t, 0),
15116 TREE_OPERAND (t, 1),
15117 strict_overflow_p);
15124 case WITH_SIZE_EXPR:
15126 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15128 case COMPOUND_EXPR:
15131 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15132 strict_overflow_p);
15135 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15136 strict_overflow_p);
15139 return alloca_call_p (t);
15147 /* Return true when T is an address and is known to be nonzero.
15148 Handle warnings about undefined signed overflow. */
15151 tree_expr_nonzero_p (tree t)
15153 bool ret, strict_overflow_p;
15155 strict_overflow_p = false;
15156 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15157 if (strict_overflow_p)
15158 fold_overflow_warning (("assuming signed overflow does not occur when "
15159 "determining that expression is always "
15161 WARN_STRICT_OVERFLOW_MISC);
15165 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15166 attempt to fold the expression to a constant without modifying TYPE,
15169 If the expression could be simplified to a constant, then return
15170 the constant. If the expression would not be simplified to a
15171 constant, then return NULL_TREE. */
15174 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15176 tree tem = fold_binary (code, type, op0, op1);
15177 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15180 /* Given the components of a unary expression CODE, TYPE and OP0,
15181 attempt to fold the expression to a constant without modifying
15184 If the expression could be simplified to a constant, then return
15185 the constant. If the expression would not be simplified to a
15186 constant, then return NULL_TREE. */
15189 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15191 tree tem = fold_unary (code, type, op0);
15192 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15195 /* If EXP represents referencing an element in a constant string
15196 (either via pointer arithmetic or array indexing), return the
15197 tree representing the value accessed, otherwise return NULL. */
15200 fold_read_from_constant_string (tree exp)
15202 if ((TREE_CODE (exp) == INDIRECT_REF
15203 || TREE_CODE (exp) == ARRAY_REF)
15204 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15206 tree exp1 = TREE_OPERAND (exp, 0);
15209 location_t loc = EXPR_LOCATION (exp);
15211 if (TREE_CODE (exp) == INDIRECT_REF)
15212 string = string_constant (exp1, &index);
15215 tree low_bound = array_ref_low_bound (exp);
15216 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15218 /* Optimize the special-case of a zero lower bound.
15220 We convert the low_bound to sizetype to avoid some problems
15221 with constant folding. (E.g. suppose the lower bound is 1,
15222 and its mode is QI. Without the conversion,l (ARRAY
15223 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15224 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15225 if (! integer_zerop (low_bound))
15226 index = size_diffop_loc (loc, index,
15227 fold_convert_loc (loc, sizetype, low_bound));
15233 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15234 && TREE_CODE (string) == STRING_CST
15235 && TREE_CODE (index) == INTEGER_CST
15236 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15237 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15239 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15240 return build_int_cst_type (TREE_TYPE (exp),
15241 (TREE_STRING_POINTER (string)
15242 [TREE_INT_CST_LOW (index)]));
15247 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15248 an integer constant, real, or fixed-point constant.
15250 TYPE is the type of the result. */
15253 fold_negate_const (tree arg0, tree type)
15255 tree t = NULL_TREE;
15257 switch (TREE_CODE (arg0))
15261 double_int val = tree_to_double_int (arg0);
15262 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15264 t = force_fit_type_double (type, val, 1,
15265 (overflow | TREE_OVERFLOW (arg0))
15266 && !TYPE_UNSIGNED (type));
15271 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15276 FIXED_VALUE_TYPE f;
15277 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15278 &(TREE_FIXED_CST (arg0)), NULL,
15279 TYPE_SATURATING (type));
15280 t = build_fixed (type, f);
15281 /* Propagate overflow flags. */
15282 if (overflow_p | TREE_OVERFLOW (arg0))
15283 TREE_OVERFLOW (t) = 1;
15288 gcc_unreachable ();
15294 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15295 an integer constant or real constant.
15297 TYPE is the type of the result. */
15300 fold_abs_const (tree arg0, tree type)
15302 tree t = NULL_TREE;
15304 switch (TREE_CODE (arg0))
15308 double_int val = tree_to_double_int (arg0);
15310 /* If the value is unsigned or non-negative, then the absolute value
15311 is the same as the ordinary value. */
15312 if (TYPE_UNSIGNED (type)
15313 || !double_int_negative_p (val))
15316 /* If the value is negative, then the absolute value is
15322 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15323 t = force_fit_type_double (type, val, -1,
15324 overflow | TREE_OVERFLOW (arg0));
15330 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15331 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15337 gcc_unreachable ();
15343 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15344 constant. TYPE is the type of the result. */
15347 fold_not_const (const_tree arg0, tree type)
15351 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15353 val = double_int_not (tree_to_double_int (arg0));
15354 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15357 /* Given CODE, a relational operator, the target type, TYPE and two
15358 constant operands OP0 and OP1, return the result of the
15359 relational operation. If the result is not a compile time
15360 constant, then return NULL_TREE. */
15363 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15365 int result, invert;
15367 /* From here on, the only cases we handle are when the result is
15368 known to be a constant. */
15370 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15372 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15373 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15375 /* Handle the cases where either operand is a NaN. */
15376 if (real_isnan (c0) || real_isnan (c1))
15386 case UNORDERED_EXPR:
15400 if (flag_trapping_math)
15406 gcc_unreachable ();
15409 return constant_boolean_node (result, type);
15412 return constant_boolean_node (real_compare (code, c0, c1), type);
15415 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15417 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15418 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15419 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15422 /* Handle equality/inequality of complex constants. */
15423 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15425 tree rcond = fold_relational_const (code, type,
15426 TREE_REALPART (op0),
15427 TREE_REALPART (op1));
15428 tree icond = fold_relational_const (code, type,
15429 TREE_IMAGPART (op0),
15430 TREE_IMAGPART (op1));
15431 if (code == EQ_EXPR)
15432 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15433 else if (code == NE_EXPR)
15434 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15439 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15441 To compute GT, swap the arguments and do LT.
15442 To compute GE, do LT and invert the result.
15443 To compute LE, swap the arguments, do LT and invert the result.
15444 To compute NE, do EQ and invert the result.
15446 Therefore, the code below must handle only EQ and LT. */
15448 if (code == LE_EXPR || code == GT_EXPR)
15453 code = swap_tree_comparison (code);
15456 /* Note that it is safe to invert for real values here because we
15457 have already handled the one case that it matters. */
15460 if (code == NE_EXPR || code == GE_EXPR)
15463 code = invert_tree_comparison (code, false);
15466 /* Compute a result for LT or EQ if args permit;
15467 Otherwise return T. */
15468 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15470 if (code == EQ_EXPR)
15471 result = tree_int_cst_equal (op0, op1);
15472 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15473 result = INT_CST_LT_UNSIGNED (op0, op1);
15475 result = INT_CST_LT (op0, op1);
15482 return constant_boolean_node (result, type);
15485 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15486 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15490 fold_build_cleanup_point_expr (tree type, tree expr)
15492 /* If the expression does not have side effects then we don't have to wrap
15493 it with a cleanup point expression. */
15494 if (!TREE_SIDE_EFFECTS (expr))
15497 /* If the expression is a return, check to see if the expression inside the
15498 return has no side effects or the right hand side of the modify expression
15499 inside the return. If either don't have side effects set we don't need to
15500 wrap the expression in a cleanup point expression. Note we don't check the
15501 left hand side of the modify because it should always be a return decl. */
15502 if (TREE_CODE (expr) == RETURN_EXPR)
15504 tree op = TREE_OPERAND (expr, 0);
15505 if (!op || !TREE_SIDE_EFFECTS (op))
15507 op = TREE_OPERAND (op, 1);
15508 if (!TREE_SIDE_EFFECTS (op))
15512 return build1 (CLEANUP_POINT_EXPR, type, expr);
15515 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15516 of an indirection through OP0, or NULL_TREE if no simplification is
15520 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15526 subtype = TREE_TYPE (sub);
15527 if (!POINTER_TYPE_P (subtype))
15530 if (TREE_CODE (sub) == ADDR_EXPR)
15532 tree op = TREE_OPERAND (sub, 0);
15533 tree optype = TREE_TYPE (op);
15534 /* *&CONST_DECL -> to the value of the const decl. */
15535 if (TREE_CODE (op) == CONST_DECL)
15536 return DECL_INITIAL (op);
15537 /* *&p => p; make sure to handle *&"str"[cst] here. */
15538 if (type == optype)
15540 tree fop = fold_read_from_constant_string (op);
15546 /* *(foo *)&fooarray => fooarray[0] */
15547 else if (TREE_CODE (optype) == ARRAY_TYPE
15548 && type == TREE_TYPE (optype)
15549 && (!in_gimple_form
15550 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15552 tree type_domain = TYPE_DOMAIN (optype);
15553 tree min_val = size_zero_node;
15554 if (type_domain && TYPE_MIN_VALUE (type_domain))
15555 min_val = TYPE_MIN_VALUE (type_domain);
15557 && TREE_CODE (min_val) != INTEGER_CST)
15559 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15560 NULL_TREE, NULL_TREE);
15562 /* *(foo *)&complexfoo => __real__ complexfoo */
15563 else if (TREE_CODE (optype) == COMPLEX_TYPE
15564 && type == TREE_TYPE (optype))
15565 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15566 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15567 else if (TREE_CODE (optype) == VECTOR_TYPE
15568 && type == TREE_TYPE (optype))
15570 tree part_width = TYPE_SIZE (type);
15571 tree index = bitsize_int (0);
15572 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15576 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15577 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15579 tree op00 = TREE_OPERAND (sub, 0);
15580 tree op01 = TREE_OPERAND (sub, 1);
15583 if (TREE_CODE (op00) == ADDR_EXPR)
15586 op00 = TREE_OPERAND (op00, 0);
15587 op00type = TREE_TYPE (op00);
15589 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15590 if (TREE_CODE (op00type) == VECTOR_TYPE
15591 && type == TREE_TYPE (op00type))
15593 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15594 tree part_width = TYPE_SIZE (type);
15595 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15596 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15597 tree index = bitsize_int (indexi);
15599 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
15600 return fold_build3_loc (loc,
15601 BIT_FIELD_REF, type, op00,
15602 part_width, index);
15605 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15606 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15607 && type == TREE_TYPE (op00type))
15609 tree size = TYPE_SIZE_UNIT (type);
15610 if (tree_int_cst_equal (size, op01))
15611 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15613 /* ((foo *)&fooarray)[1] => fooarray[1] */
15614 else if (TREE_CODE (op00type) == ARRAY_TYPE
15615 && type == TREE_TYPE (op00type))
15617 tree type_domain = TYPE_DOMAIN (op00type);
15618 tree min_val = size_zero_node;
15619 if (type_domain && TYPE_MIN_VALUE (type_domain))
15620 min_val = TYPE_MIN_VALUE (type_domain);
15621 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15622 TYPE_SIZE_UNIT (type));
15623 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15624 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15625 NULL_TREE, NULL_TREE);
15630 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15631 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15632 && type == TREE_TYPE (TREE_TYPE (subtype))
15633 && (!in_gimple_form
15634 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15637 tree min_val = size_zero_node;
15638 sub = build_fold_indirect_ref_loc (loc, sub);
15639 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15640 if (type_domain && TYPE_MIN_VALUE (type_domain))
15641 min_val = TYPE_MIN_VALUE (type_domain);
15643 && TREE_CODE (min_val) != INTEGER_CST)
15645 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15652 /* Builds an expression for an indirection through T, simplifying some
15656 build_fold_indirect_ref_loc (location_t loc, tree t)
15658 tree type = TREE_TYPE (TREE_TYPE (t));
15659 tree sub = fold_indirect_ref_1 (loc, type, t);
15664 return build1_loc (loc, INDIRECT_REF, type, t);
15667 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15670 fold_indirect_ref_loc (location_t loc, tree t)
15672 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15680 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15681 whose result is ignored. The type of the returned tree need not be
15682 the same as the original expression. */
15685 fold_ignored_result (tree t)
15687 if (!TREE_SIDE_EFFECTS (t))
15688 return integer_zero_node;
15691 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15694 t = TREE_OPERAND (t, 0);
15698 case tcc_comparison:
15699 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15700 t = TREE_OPERAND (t, 0);
15701 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15702 t = TREE_OPERAND (t, 1);
15707 case tcc_expression:
15708 switch (TREE_CODE (t))
15710 case COMPOUND_EXPR:
15711 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15713 t = TREE_OPERAND (t, 0);
15717 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15718 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15720 t = TREE_OPERAND (t, 0);
15733 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15734 This can only be applied to objects of a sizetype. */
15737 round_up_loc (location_t loc, tree value, int divisor)
15739 tree div = NULL_TREE;
15741 gcc_assert (divisor > 0);
15745 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15746 have to do anything. Only do this when we are not given a const,
15747 because in that case, this check is more expensive than just
15749 if (TREE_CODE (value) != INTEGER_CST)
15751 div = build_int_cst (TREE_TYPE (value), divisor);
15753 if (multiple_of_p (TREE_TYPE (value), value, div))
15757 /* If divisor is a power of two, simplify this to bit manipulation. */
15758 if (divisor == (divisor & -divisor))
15760 if (TREE_CODE (value) == INTEGER_CST)
15762 double_int val = tree_to_double_int (value);
15765 if ((val.low & (divisor - 1)) == 0)
15768 overflow_p = TREE_OVERFLOW (value);
15769 val.low &= ~(divisor - 1);
15770 val.low += divisor;
15778 return force_fit_type_double (TREE_TYPE (value), val,
15785 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15786 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15787 t = build_int_cst (TREE_TYPE (value), -divisor);
15788 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15794 div = build_int_cst (TREE_TYPE (value), divisor);
15795 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15796 value = size_binop_loc (loc, MULT_EXPR, value, div);
15802 /* Likewise, but round down. */
15805 round_down_loc (location_t loc, tree value, int divisor)
15807 tree div = NULL_TREE;
15809 gcc_assert (divisor > 0);
15813 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15814 have to do anything. Only do this when we are not given a const,
15815 because in that case, this check is more expensive than just
15817 if (TREE_CODE (value) != INTEGER_CST)
15819 div = build_int_cst (TREE_TYPE (value), divisor);
15821 if (multiple_of_p (TREE_TYPE (value), value, div))
15825 /* If divisor is a power of two, simplify this to bit manipulation. */
15826 if (divisor == (divisor & -divisor))
15830 t = build_int_cst (TREE_TYPE (value), -divisor);
15831 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15836 div = build_int_cst (TREE_TYPE (value), divisor);
15837 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15838 value = size_binop_loc (loc, MULT_EXPR, value, div);
15844 /* Returns the pointer to the base of the object addressed by EXP and
15845 extracts the information about the offset of the access, storing it
15846 to PBITPOS and POFFSET. */
15849 split_address_to_core_and_offset (tree exp,
15850 HOST_WIDE_INT *pbitpos, tree *poffset)
15853 enum machine_mode mode;
15854 int unsignedp, volatilep;
15855 HOST_WIDE_INT bitsize;
15856 location_t loc = EXPR_LOCATION (exp);
15858 if (TREE_CODE (exp) == ADDR_EXPR)
15860 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15861 poffset, &mode, &unsignedp, &volatilep,
15863 core = build_fold_addr_expr_loc (loc, core);
15869 *poffset = NULL_TREE;
15875 /* Returns true if addresses of E1 and E2 differ by a constant, false
15876 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15879 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15882 HOST_WIDE_INT bitpos1, bitpos2;
15883 tree toffset1, toffset2, tdiff, type;
15885 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15886 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15888 if (bitpos1 % BITS_PER_UNIT != 0
15889 || bitpos2 % BITS_PER_UNIT != 0
15890 || !operand_equal_p (core1, core2, 0))
15893 if (toffset1 && toffset2)
15895 type = TREE_TYPE (toffset1);
15896 if (type != TREE_TYPE (toffset2))
15897 toffset2 = fold_convert (type, toffset2);
15899 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15900 if (!cst_and_fits_in_hwi (tdiff))
15903 *diff = int_cst_value (tdiff);
15905 else if (toffset1 || toffset2)
15907 /* If only one of the offsets is non-constant, the difference cannot
15914 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15918 /* Simplify the floating point expression EXP when the sign of the
15919 result is not significant. Return NULL_TREE if no simplification
15923 fold_strip_sign_ops (tree exp)
15926 location_t loc = EXPR_LOCATION (exp);
15928 switch (TREE_CODE (exp))
15932 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15933 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15937 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15939 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15940 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15941 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15942 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
15943 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15944 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15947 case COMPOUND_EXPR:
15948 arg0 = TREE_OPERAND (exp, 0);
15949 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15951 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15955 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15956 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15958 return fold_build3_loc (loc,
15959 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15960 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15961 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15966 const enum built_in_function fcode = builtin_mathfn_code (exp);
15969 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15970 /* Strip copysign function call, return the 1st argument. */
15971 arg0 = CALL_EXPR_ARG (exp, 0);
15972 arg1 = CALL_EXPR_ARG (exp, 1);
15973 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
15976 /* Strip sign ops from the argument of "odd" math functions. */
15977 if (negate_mathfn_p (fcode))
15979 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15981 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);