1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide and size_binop.
32 fold takes a tree as argument and returns a simplified tree.
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'.
41 Note: Since the folders get called on non-gimple code as well as
42 gimple code, we need to handle GIMPLE tuples as well as their
43 corresponding tree equivalents. */
47 #include "coretypes.h"
56 #include "diagnostic-core.h"
60 #include "langhooks.h"
63 #include "tree-flow.h"
65 /* Nonzero if we are folding constants inside an initializer; zero
67 int folding_initializer = 0;
69 /* The following constants represent a bit based encoding of GCC's
70 comparison operators. This encoding simplifies transformations
71 on relational comparison operators, such as AND and OR. */
72 enum comparison_code {
91 static bool negate_mathfn_p (enum built_in_function);
92 static bool negate_expr_p (tree);
93 static tree negate_expr (tree);
94 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
95 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
96 static tree const_binop (enum tree_code, tree, tree);
97 static enum comparison_code comparison_to_compcode (enum tree_code);
98 static enum tree_code compcode_to_comparison (enum comparison_code);
99 static int operand_equal_for_comparison_p (tree, tree, tree);
100 static int twoval_comparison_p (tree, tree *, tree *, int *);
101 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
102 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
103 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
104 static tree make_bit_field_ref (location_t, tree, tree,
105 HOST_WIDE_INT, HOST_WIDE_INT, int);
106 static tree optimize_bit_field_compare (location_t, enum tree_code,
108 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
110 enum machine_mode *, int *, int *,
112 static int all_ones_mask_p (const_tree, int);
113 static tree sign_bit_p (tree, const_tree);
114 static int simple_operand_p (const_tree);
115 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
116 static tree range_predecessor (tree);
117 static tree range_successor (tree);
118 extern tree make_range (tree, int *, tree *, tree *, bool *);
119 extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
121 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
122 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
123 static tree unextend (tree, int, int, tree);
124 static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
125 static tree optimize_minmax_comparison (location_t, enum tree_code,
127 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
128 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
129 static tree fold_binary_op_with_conditional_arg (location_t,
130 enum tree_code, tree,
133 static tree fold_mathfn_compare (location_t,
134 enum built_in_function, enum tree_code,
136 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
137 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
138 static bool reorder_operands_p (const_tree, const_tree);
139 static tree fold_negate_const (tree, tree);
140 static tree fold_not_const (const_tree, tree);
141 static tree fold_relational_const (enum tree_code, tree, tree, tree);
142 static tree fold_convert_const (enum tree_code, tree, tree);
145 /* Similar to protected_set_expr_location, but never modify x in place,
146 if location can and needs to be set, unshare it. */
149 protected_set_expr_location_unshare (tree x, location_t loc)
151 if (CAN_HAVE_LOCATION_P (x)
152 && EXPR_LOCATION (x) != loc
153 && !(TREE_CODE (x) == SAVE_EXPR
154 || TREE_CODE (x) == TARGET_EXPR
155 || TREE_CODE (x) == BIND_EXPR))
158 SET_EXPR_LOCATION (x, loc);
164 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
165 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
166 and SUM1. Then this yields nonzero if overflow occurred during the
169 Overflow occurs if A and B have the same sign, but A and SUM differ in
170 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
172 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
174 /* If ARG2 divides ARG1 with zero remainder, carries out the division
175 of type CODE and returns the quotient.
176 Otherwise returns NULL_TREE. */
179 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
184 /* The sign of the division is according to operand two, that
185 does the correct thing for POINTER_PLUS_EXPR where we want
186 a signed division. */
187 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
188 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
189 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
192 quo = double_int_divmod (tree_to_double_int (arg1),
193 tree_to_double_int (arg2),
196 if (double_int_zero_p (rem))
197 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
202 /* This is nonzero if we should defer warnings about undefined
203 overflow. This facility exists because these warnings are a
204 special case. The code to estimate loop iterations does not want
205 to issue any warnings, since it works with expressions which do not
206 occur in user code. Various bits of cleanup code call fold(), but
207 only use the result if it has certain characteristics (e.g., is a
208 constant); that code only wants to issue a warning if the result is
211 static int fold_deferring_overflow_warnings;
213 /* If a warning about undefined overflow is deferred, this is the
214 warning. Note that this may cause us to turn two warnings into
215 one, but that is fine since it is sufficient to only give one
216 warning per expression. */
218 static const char* fold_deferred_overflow_warning;
220 /* If a warning about undefined overflow is deferred, this is the
221 level at which the warning should be emitted. */
223 static enum warn_strict_overflow_code fold_deferred_overflow_code;
225 /* Start deferring overflow warnings. We could use a stack here to
226 permit nested calls, but at present it is not necessary. */
229 fold_defer_overflow_warnings (void)
231 ++fold_deferring_overflow_warnings;
234 /* Stop deferring overflow warnings. If there is a pending warning,
235 and ISSUE is true, then issue the warning if appropriate. STMT is
236 the statement with which the warning should be associated (used for
237 location information); STMT may be NULL. CODE is the level of the
238 warning--a warn_strict_overflow_code value. This function will use
239 the smaller of CODE and the deferred code when deciding whether to
240 issue the warning. CODE may be zero to mean to always use the
244 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
249 gcc_assert (fold_deferring_overflow_warnings > 0);
250 --fold_deferring_overflow_warnings;
251 if (fold_deferring_overflow_warnings > 0)
253 if (fold_deferred_overflow_warning != NULL
255 && code < (int) fold_deferred_overflow_code)
256 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
260 warnmsg = fold_deferred_overflow_warning;
261 fold_deferred_overflow_warning = NULL;
263 if (!issue || warnmsg == NULL)
266 if (gimple_no_warning_p (stmt))
269 /* Use the smallest code level when deciding to issue the
271 if (code == 0 || code > (int) fold_deferred_overflow_code)
272 code = fold_deferred_overflow_code;
274 if (!issue_strict_overflow_warning (code))
278 locus = input_location;
280 locus = gimple_location (stmt);
281 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
284 /* Stop deferring overflow warnings, ignoring any deferred
288 fold_undefer_and_ignore_overflow_warnings (void)
290 fold_undefer_overflow_warnings (false, NULL, 0);
293 /* Whether we are deferring overflow warnings. */
296 fold_deferring_overflow_warnings_p (void)
298 return fold_deferring_overflow_warnings > 0;
301 /* This is called when we fold something based on the fact that signed
302 overflow is undefined. */
305 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
307 if (fold_deferring_overflow_warnings > 0)
309 if (fold_deferred_overflow_warning == NULL
310 || wc < fold_deferred_overflow_code)
312 fold_deferred_overflow_warning = gmsgid;
313 fold_deferred_overflow_code = wc;
316 else if (issue_strict_overflow_warning (wc))
317 warning (OPT_Wstrict_overflow, gmsgid);
320 /* Return true if the built-in mathematical function specified by CODE
321 is odd, i.e. -f(x) == f(-x). */
324 negate_mathfn_p (enum built_in_function code)
328 CASE_FLT_FN (BUILT_IN_ASIN):
329 CASE_FLT_FN (BUILT_IN_ASINH):
330 CASE_FLT_FN (BUILT_IN_ATAN):
331 CASE_FLT_FN (BUILT_IN_ATANH):
332 CASE_FLT_FN (BUILT_IN_CASIN):
333 CASE_FLT_FN (BUILT_IN_CASINH):
334 CASE_FLT_FN (BUILT_IN_CATAN):
335 CASE_FLT_FN (BUILT_IN_CATANH):
336 CASE_FLT_FN (BUILT_IN_CBRT):
337 CASE_FLT_FN (BUILT_IN_CPROJ):
338 CASE_FLT_FN (BUILT_IN_CSIN):
339 CASE_FLT_FN (BUILT_IN_CSINH):
340 CASE_FLT_FN (BUILT_IN_CTAN):
341 CASE_FLT_FN (BUILT_IN_CTANH):
342 CASE_FLT_FN (BUILT_IN_ERF):
343 CASE_FLT_FN (BUILT_IN_LLROUND):
344 CASE_FLT_FN (BUILT_IN_LROUND):
345 CASE_FLT_FN (BUILT_IN_ROUND):
346 CASE_FLT_FN (BUILT_IN_SIN):
347 CASE_FLT_FN (BUILT_IN_SINH):
348 CASE_FLT_FN (BUILT_IN_TAN):
349 CASE_FLT_FN (BUILT_IN_TANH):
350 CASE_FLT_FN (BUILT_IN_TRUNC):
353 CASE_FLT_FN (BUILT_IN_LLRINT):
354 CASE_FLT_FN (BUILT_IN_LRINT):
355 CASE_FLT_FN (BUILT_IN_NEARBYINT):
356 CASE_FLT_FN (BUILT_IN_RINT):
357 return !flag_rounding_math;
365 /* Check whether we may negate an integer constant T without causing
369 may_negate_without_overflow_p (const_tree t)
371 unsigned HOST_WIDE_INT val;
375 gcc_assert (TREE_CODE (t) == INTEGER_CST);
377 type = TREE_TYPE (t);
378 if (TYPE_UNSIGNED (type))
381 prec = TYPE_PRECISION (type);
382 if (prec > HOST_BITS_PER_WIDE_INT)
384 if (TREE_INT_CST_LOW (t) != 0)
386 prec -= HOST_BITS_PER_WIDE_INT;
387 val = TREE_INT_CST_HIGH (t);
390 val = TREE_INT_CST_LOW (t);
391 if (prec < HOST_BITS_PER_WIDE_INT)
392 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
393 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
396 /* Determine whether an expression T can be cheaply negated using
397 the function negate_expr without introducing undefined overflow. */
400 negate_expr_p (tree t)
407 type = TREE_TYPE (t);
410 switch (TREE_CODE (t))
413 if (TYPE_OVERFLOW_WRAPS (type))
416 /* Check that -CST will not overflow type. */
417 return may_negate_without_overflow_p (t);
419 return (INTEGRAL_TYPE_P (type)
420 && TYPE_OVERFLOW_WRAPS (type));
427 /* We want to canonicalize to positive real constants. Pretend
428 that only negative ones can be easily negated. */
429 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
432 return negate_expr_p (TREE_REALPART (t))
433 && negate_expr_p (TREE_IMAGPART (t));
436 return negate_expr_p (TREE_OPERAND (t, 0))
437 && negate_expr_p (TREE_OPERAND (t, 1));
440 return negate_expr_p (TREE_OPERAND (t, 0));
443 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
444 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
446 /* -(A + B) -> (-B) - A. */
447 if (negate_expr_p (TREE_OPERAND (t, 1))
448 && reorder_operands_p (TREE_OPERAND (t, 0),
449 TREE_OPERAND (t, 1)))
451 /* -(A + B) -> (-A) - B. */
452 return negate_expr_p (TREE_OPERAND (t, 0));
455 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
456 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
457 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
458 && reorder_operands_p (TREE_OPERAND (t, 0),
459 TREE_OPERAND (t, 1));
462 if (TYPE_UNSIGNED (TREE_TYPE (t)))
468 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
469 return negate_expr_p (TREE_OPERAND (t, 1))
470 || negate_expr_p (TREE_OPERAND (t, 0));
478 /* In general we can't negate A / B, because if A is INT_MIN and
479 B is 1, we may turn this into INT_MIN / -1 which is undefined
480 and actually traps on some architectures. But if overflow is
481 undefined, we can negate, because - (INT_MIN / 1) is an
483 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
484 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
486 return negate_expr_p (TREE_OPERAND (t, 1))
487 || negate_expr_p (TREE_OPERAND (t, 0));
490 /* Negate -((double)float) as (double)(-float). */
491 if (TREE_CODE (type) == REAL_TYPE)
493 tree tem = strip_float_extensions (t);
495 return negate_expr_p (tem);
500 /* Negate -f(x) as f(-x). */
501 if (negate_mathfn_p (builtin_mathfn_code (t)))
502 return negate_expr_p (CALL_EXPR_ARG (t, 0));
506 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
507 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
509 tree op1 = TREE_OPERAND (t, 1);
510 if (TREE_INT_CST_HIGH (op1) == 0
511 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
512 == TREE_INT_CST_LOW (op1))
523 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
524 simplification is possible.
525 If negate_expr_p would return true for T, NULL_TREE will never be
529 fold_negate_expr (location_t loc, tree t)
531 tree type = TREE_TYPE (t);
534 switch (TREE_CODE (t))
536 /* Convert - (~A) to A + 1. */
538 if (INTEGRAL_TYPE_P (type))
539 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
540 build_int_cst (type, 1));
544 tem = fold_negate_const (t, type);
545 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
546 || !TYPE_OVERFLOW_TRAPS (type))
551 tem = fold_negate_const (t, type);
552 /* Two's complement FP formats, such as c4x, may overflow. */
553 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
558 tem = fold_negate_const (t, type);
563 tree rpart = negate_expr (TREE_REALPART (t));
564 tree ipart = negate_expr (TREE_IMAGPART (t));
566 if ((TREE_CODE (rpart) == REAL_CST
567 && TREE_CODE (ipart) == REAL_CST)
568 || (TREE_CODE (rpart) == INTEGER_CST
569 && TREE_CODE (ipart) == INTEGER_CST))
570 return build_complex (type, rpart, ipart);
575 if (negate_expr_p (t))
576 return fold_build2_loc (loc, COMPLEX_EXPR, type,
577 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
578 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
582 if (negate_expr_p (t))
583 return fold_build1_loc (loc, CONJ_EXPR, type,
584 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
588 return TREE_OPERAND (t, 0);
591 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
592 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
594 /* -(A + B) -> (-B) - A. */
595 if (negate_expr_p (TREE_OPERAND (t, 1))
596 && reorder_operands_p (TREE_OPERAND (t, 0),
597 TREE_OPERAND (t, 1)))
599 tem = negate_expr (TREE_OPERAND (t, 1));
600 return fold_build2_loc (loc, MINUS_EXPR, type,
601 tem, TREE_OPERAND (t, 0));
604 /* -(A + B) -> (-A) - B. */
605 if (negate_expr_p (TREE_OPERAND (t, 0)))
607 tem = negate_expr (TREE_OPERAND (t, 0));
608 return fold_build2_loc (loc, MINUS_EXPR, type,
609 tem, TREE_OPERAND (t, 1));
615 /* - (A - B) -> B - A */
616 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
617 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
618 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
619 return fold_build2_loc (loc, MINUS_EXPR, type,
620 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
624 if (TYPE_UNSIGNED (type))
630 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
632 tem = TREE_OPERAND (t, 1);
633 if (negate_expr_p (tem))
634 return fold_build2_loc (loc, TREE_CODE (t), type,
635 TREE_OPERAND (t, 0), negate_expr (tem));
636 tem = TREE_OPERAND (t, 0);
637 if (negate_expr_p (tem))
638 return fold_build2_loc (loc, TREE_CODE (t), type,
639 negate_expr (tem), TREE_OPERAND (t, 1));
648 /* In general we can't negate A / B, because if A is INT_MIN and
649 B is 1, we may turn this into INT_MIN / -1 which is undefined
650 and actually traps on some architectures. But if overflow is
651 undefined, we can negate, because - (INT_MIN / 1) is an
653 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
655 const char * const warnmsg = G_("assuming signed overflow does not "
656 "occur when negating a division");
657 tem = TREE_OPERAND (t, 1);
658 if (negate_expr_p (tem))
660 if (INTEGRAL_TYPE_P (type)
661 && (TREE_CODE (tem) != INTEGER_CST
662 || integer_onep (tem)))
663 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
664 return fold_build2_loc (loc, TREE_CODE (t), type,
665 TREE_OPERAND (t, 0), negate_expr (tem));
667 tem = TREE_OPERAND (t, 0);
668 if (negate_expr_p (tem))
670 if (INTEGRAL_TYPE_P (type)
671 && (TREE_CODE (tem) != INTEGER_CST
672 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
673 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
674 return fold_build2_loc (loc, TREE_CODE (t), type,
675 negate_expr (tem), TREE_OPERAND (t, 1));
681 /* Convert -((double)float) into (double)(-float). */
682 if (TREE_CODE (type) == REAL_TYPE)
684 tem = strip_float_extensions (t);
685 if (tem != t && negate_expr_p (tem))
686 return fold_convert_loc (loc, type, negate_expr (tem));
691 /* Negate -f(x) as f(-x). */
692 if (negate_mathfn_p (builtin_mathfn_code (t))
693 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
697 fndecl = get_callee_fndecl (t);
698 arg = negate_expr (CALL_EXPR_ARG (t, 0));
699 return build_call_expr_loc (loc, fndecl, 1, arg);
704 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
705 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
707 tree op1 = TREE_OPERAND (t, 1);
708 if (TREE_INT_CST_HIGH (op1) == 0
709 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
710 == TREE_INT_CST_LOW (op1))
712 tree ntype = TYPE_UNSIGNED (type)
713 ? signed_type_for (type)
714 : unsigned_type_for (type);
715 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
716 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
717 return fold_convert_loc (loc, type, temp);
729 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
730 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
742 loc = EXPR_LOCATION (t);
743 type = TREE_TYPE (t);
746 tem = fold_negate_expr (loc, t);
748 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
749 return fold_convert_loc (loc, type, tem);
752 /* Split a tree IN into a constant, literal and variable parts that could be
753 combined with CODE to make IN. "constant" means an expression with
754 TREE_CONSTANT but that isn't an actual constant. CODE must be a
755 commutative arithmetic operation. Store the constant part into *CONP,
756 the literal in *LITP and return the variable part. If a part isn't
757 present, set it to null. If the tree does not decompose in this way,
758 return the entire tree as the variable part and the other parts as null.
760 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
761 case, we negate an operand that was subtracted. Except if it is a
762 literal for which we use *MINUS_LITP instead.
764 If NEGATE_P is true, we are negating all of IN, again except a literal
765 for which we use *MINUS_LITP instead.
767 If IN is itself a literal or constant, return it as appropriate.
769 Note that we do not guarantee that any of the three values will be the
770 same type as IN, but they will have the same signedness and mode. */
773 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
774 tree *minus_litp, int negate_p)
782 /* Strip any conversions that don't change the machine mode or signedness. */
783 STRIP_SIGN_NOPS (in);
785 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
786 || TREE_CODE (in) == FIXED_CST)
788 else if (TREE_CODE (in) == code
789 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
790 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
791 /* We can associate addition and subtraction together (even
792 though the C standard doesn't say so) for integers because
793 the value is not affected. For reals, the value might be
794 affected, so we can't. */
795 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
796 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
798 tree op0 = TREE_OPERAND (in, 0);
799 tree op1 = TREE_OPERAND (in, 1);
800 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
801 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
803 /* First see if either of the operands is a literal, then a constant. */
804 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
805 || TREE_CODE (op0) == FIXED_CST)
806 *litp = op0, op0 = 0;
807 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
808 || TREE_CODE (op1) == FIXED_CST)
809 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
811 if (op0 != 0 && TREE_CONSTANT (op0))
812 *conp = op0, op0 = 0;
813 else if (op1 != 0 && TREE_CONSTANT (op1))
814 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
816 /* If we haven't dealt with either operand, this is not a case we can
817 decompose. Otherwise, VAR is either of the ones remaining, if any. */
818 if (op0 != 0 && op1 != 0)
823 var = op1, neg_var_p = neg1_p;
825 /* Now do any needed negations. */
827 *minus_litp = *litp, *litp = 0;
829 *conp = negate_expr (*conp);
831 var = negate_expr (var);
833 else if (TREE_CONSTANT (in))
841 *minus_litp = *litp, *litp = 0;
842 else if (*minus_litp)
843 *litp = *minus_litp, *minus_litp = 0;
844 *conp = negate_expr (*conp);
845 var = negate_expr (var);
851 /* Re-associate trees split by the above function. T1 and T2 are
852 either expressions to associate or null. Return the new
853 expression, if any. LOC is the location of the new expression. If
854 we build an operation, do it in TYPE and with CODE. */
857 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
864 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
865 try to fold this since we will have infinite recursion. But do
866 deal with any NEGATE_EXPRs. */
867 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
868 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
870 if (code == PLUS_EXPR)
872 if (TREE_CODE (t1) == NEGATE_EXPR)
873 return build2_loc (loc, MINUS_EXPR, type,
874 fold_convert_loc (loc, type, t2),
875 fold_convert_loc (loc, type,
876 TREE_OPERAND (t1, 0)));
877 else if (TREE_CODE (t2) == NEGATE_EXPR)
878 return build2_loc (loc, MINUS_EXPR, type,
879 fold_convert_loc (loc, type, t1),
880 fold_convert_loc (loc, type,
881 TREE_OPERAND (t2, 0)));
882 else if (integer_zerop (t2))
883 return fold_convert_loc (loc, type, t1);
885 else if (code == MINUS_EXPR)
887 if (integer_zerop (t2))
888 return fold_convert_loc (loc, type, t1);
891 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
892 fold_convert_loc (loc, type, t2));
895 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
896 fold_convert_loc (loc, type, t2));
899 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
900 for use in int_const_binop, size_binop and size_diffop. */
903 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
905 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
907 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
922 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
923 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
924 && TYPE_MODE (type1) == TYPE_MODE (type2);
928 /* Combine two integer constants ARG1 and ARG2 under operation CODE
929 to produce a new constant. Return NULL_TREE if we don't know how
930 to evaluate CODE at compile-time.
932 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
935 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
937 double_int op1, op2, res, tmp;
939 tree type = TREE_TYPE (arg1);
940 bool uns = TYPE_UNSIGNED (type);
942 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
943 bool overflow = false;
945 op1 = tree_to_double_int (arg1);
946 op2 = tree_to_double_int (arg2);
951 res = double_int_ior (op1, op2);
955 res = double_int_xor (op1, op2);
959 res = double_int_and (op1, op2);
963 res = double_int_rshift (op1, double_int_to_shwi (op2),
964 TYPE_PRECISION (type), !uns);
968 /* It's unclear from the C standard whether shifts can overflow.
969 The following code ignores overflow; perhaps a C standard
970 interpretation ruling is needed. */
971 res = double_int_lshift (op1, double_int_to_shwi (op2),
972 TYPE_PRECISION (type), !uns);
976 res = double_int_rrotate (op1, double_int_to_shwi (op2),
977 TYPE_PRECISION (type));
981 res = double_int_lrotate (op1, double_int_to_shwi (op2),
982 TYPE_PRECISION (type));
986 overflow = add_double (op1.low, op1.high, op2.low, op2.high,
987 &res.low, &res.high);
991 neg_double (op2.low, op2.high, &res.low, &res.high);
992 add_double (op1.low, op1.high, res.low, res.high,
993 &res.low, &res.high);
994 overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
998 overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
999 &res.low, &res.high);
1002 case TRUNC_DIV_EXPR:
1003 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1004 case EXACT_DIV_EXPR:
1005 /* This is a shortcut for a common special case. */
1006 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1007 && !TREE_OVERFLOW (arg1)
1008 && !TREE_OVERFLOW (arg2)
1009 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1011 if (code == CEIL_DIV_EXPR)
1012 op1.low += op2.low - 1;
1014 res.low = op1.low / op2.low, res.high = 0;
1018 /* ... fall through ... */
1020 case ROUND_DIV_EXPR:
1021 if (double_int_zero_p (op2))
1023 if (double_int_one_p (op2))
1028 if (double_int_equal_p (op1, op2)
1029 && ! double_int_zero_p (op1))
1031 res = double_int_one;
1034 overflow = div_and_round_double (code, uns,
1035 op1.low, op1.high, op2.low, op2.high,
1036 &res.low, &res.high,
1037 &tmp.low, &tmp.high);
1040 case TRUNC_MOD_EXPR:
1041 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1042 /* This is a shortcut for a common special case. */
1043 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1044 && !TREE_OVERFLOW (arg1)
1045 && !TREE_OVERFLOW (arg2)
1046 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1048 if (code == CEIL_MOD_EXPR)
1049 op1.low += op2.low - 1;
1050 res.low = op1.low % op2.low, res.high = 0;
1054 /* ... fall through ... */
1056 case ROUND_MOD_EXPR:
1057 if (double_int_zero_p (op2))
1059 overflow = div_and_round_double (code, uns,
1060 op1.low, op1.high, op2.low, op2.high,
1061 &tmp.low, &tmp.high,
1062 &res.low, &res.high);
1066 res = double_int_min (op1, op2, uns);
1070 res = double_int_max (op1, op2, uns);
1079 t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);
1081 /* Propagate overflow flags ourselves. */
1082 if (((!uns || is_sizetype) && overflow)
1083 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1086 TREE_OVERFLOW (t) = 1;
1090 t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
1091 ((!uns || is_sizetype) && overflow)
1092 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1097 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1098 constant. We assume ARG1 and ARG2 have the same data type, or at least
1099 are the same kind of constant and the same machine mode. Return zero if
1100 combining the constants is not allowed in the current operating mode. */
1103 const_binop (enum tree_code code, tree arg1, tree arg2)
1105 /* Sanity check for the recursive cases. */
1112 if (TREE_CODE (arg1) == INTEGER_CST)
1113 return int_const_binop (code, arg1, arg2, 0);
1115 if (TREE_CODE (arg1) == REAL_CST)
1117 enum machine_mode mode;
1120 REAL_VALUE_TYPE value;
1121 REAL_VALUE_TYPE result;
1125 /* The following codes are handled by real_arithmetic. */
1140 d1 = TREE_REAL_CST (arg1);
1141 d2 = TREE_REAL_CST (arg2);
1143 type = TREE_TYPE (arg1);
1144 mode = TYPE_MODE (type);
1146 /* Don't perform operation if we honor signaling NaNs and
1147 either operand is a NaN. */
1148 if (HONOR_SNANS (mode)
1149 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1152 /* Don't perform operation if it would raise a division
1153 by zero exception. */
1154 if (code == RDIV_EXPR
1155 && REAL_VALUES_EQUAL (d2, dconst0)
1156 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1159 /* If either operand is a NaN, just return it. Otherwise, set up
1160 for floating-point trap; we return an overflow. */
1161 if (REAL_VALUE_ISNAN (d1))
1163 else if (REAL_VALUE_ISNAN (d2))
1166 inexact = real_arithmetic (&value, code, &d1, &d2);
1167 real_convert (&result, mode, &value);
1169 /* Don't constant fold this floating point operation if
1170 the result has overflowed and flag_trapping_math. */
1171 if (flag_trapping_math
1172 && MODE_HAS_INFINITIES (mode)
1173 && REAL_VALUE_ISINF (result)
1174 && !REAL_VALUE_ISINF (d1)
1175 && !REAL_VALUE_ISINF (d2))
1178 /* Don't constant fold this floating point operation if the
1179 result may dependent upon the run-time rounding mode and
1180 flag_rounding_math is set, or if GCC's software emulation
1181 is unable to accurately represent the result. */
1182 if ((flag_rounding_math
1183 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1184 && (inexact || !real_identical (&result, &value)))
1187 t = build_real (type, result);
1189 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1193 if (TREE_CODE (arg1) == FIXED_CST)
1195 FIXED_VALUE_TYPE f1;
1196 FIXED_VALUE_TYPE f2;
1197 FIXED_VALUE_TYPE result;
1202 /* The following codes are handled by fixed_arithmetic. */
1208 case TRUNC_DIV_EXPR:
1209 f2 = TREE_FIXED_CST (arg2);
1214 f2.data.high = TREE_INT_CST_HIGH (arg2);
1215 f2.data.low = TREE_INT_CST_LOW (arg2);
1223 f1 = TREE_FIXED_CST (arg1);
1224 type = TREE_TYPE (arg1);
1225 sat_p = TYPE_SATURATING (type);
1226 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1227 t = build_fixed (type, result);
1228 /* Propagate overflow flags. */
1229 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1230 TREE_OVERFLOW (t) = 1;
1234 if (TREE_CODE (arg1) == COMPLEX_CST)
1236 tree type = TREE_TYPE (arg1);
1237 tree r1 = TREE_REALPART (arg1);
1238 tree i1 = TREE_IMAGPART (arg1);
1239 tree r2 = TREE_REALPART (arg2);
1240 tree i2 = TREE_IMAGPART (arg2);
1247 real = const_binop (code, r1, r2);
1248 imag = const_binop (code, i1, i2);
1252 if (COMPLEX_FLOAT_TYPE_P (type))
1253 return do_mpc_arg2 (arg1, arg2, type,
1254 /* do_nonfinite= */ folding_initializer,
1257 real = const_binop (MINUS_EXPR,
1258 const_binop (MULT_EXPR, r1, r2),
1259 const_binop (MULT_EXPR, i1, i2));
1260 imag = const_binop (PLUS_EXPR,
1261 const_binop (MULT_EXPR, r1, i2),
1262 const_binop (MULT_EXPR, i1, r2));
1266 if (COMPLEX_FLOAT_TYPE_P (type))
1267 return do_mpc_arg2 (arg1, arg2, type,
1268 /* do_nonfinite= */ folding_initializer,
1271 case TRUNC_DIV_EXPR:
1273 case FLOOR_DIV_EXPR:
1274 case ROUND_DIV_EXPR:
1275 if (flag_complex_method == 0)
1277 /* Keep this algorithm in sync with
1278 tree-complex.c:expand_complex_div_straight().
1280 Expand complex division to scalars, straightforward algorithm.
1281 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1285 = const_binop (PLUS_EXPR,
1286 const_binop (MULT_EXPR, r2, r2),
1287 const_binop (MULT_EXPR, i2, i2));
1289 = const_binop (PLUS_EXPR,
1290 const_binop (MULT_EXPR, r1, r2),
1291 const_binop (MULT_EXPR, i1, i2));
1293 = const_binop (MINUS_EXPR,
1294 const_binop (MULT_EXPR, i1, r2),
1295 const_binop (MULT_EXPR, r1, i2));
1297 real = const_binop (code, t1, magsquared);
1298 imag = const_binop (code, t2, magsquared);
1302 /* Keep this algorithm in sync with
1303 tree-complex.c:expand_complex_div_wide().
1305 Expand complex division to scalars, modified algorithm to minimize
1306 overflow with wide input ranges. */
1307 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1308 fold_abs_const (r2, TREE_TYPE (type)),
1309 fold_abs_const (i2, TREE_TYPE (type)));
1311 if (integer_nonzerop (compare))
1313 /* In the TRUE branch, we compute
1315 div = (br * ratio) + bi;
1316 tr = (ar * ratio) + ai;
1317 ti = (ai * ratio) - ar;
1320 tree ratio = const_binop (code, r2, i2);
1321 tree div = const_binop (PLUS_EXPR, i2,
1322 const_binop (MULT_EXPR, r2, ratio));
1323 real = const_binop (MULT_EXPR, r1, ratio);
1324 real = const_binop (PLUS_EXPR, real, i1);
1325 real = const_binop (code, real, div);
1327 imag = const_binop (MULT_EXPR, i1, ratio);
1328 imag = const_binop (MINUS_EXPR, imag, r1);
1329 imag = const_binop (code, imag, div);
1333 /* In the FALSE branch, we compute
1335 divisor = (d * ratio) + c;
1336 tr = (b * ratio) + a;
1337 ti = b - (a * ratio);
1340 tree ratio = const_binop (code, i2, r2);
1341 tree div = const_binop (PLUS_EXPR, r2,
1342 const_binop (MULT_EXPR, i2, ratio));
1344 real = const_binop (MULT_EXPR, i1, ratio);
1345 real = const_binop (PLUS_EXPR, real, r1);
1346 real = const_binop (code, real, div);
1348 imag = const_binop (MULT_EXPR, r1, ratio);
1349 imag = const_binop (MINUS_EXPR, i1, imag);
1350 imag = const_binop (code, imag, div);
1360 return build_complex (type, real, imag);
1363 if (TREE_CODE (arg1) == VECTOR_CST)
1365 tree type = TREE_TYPE(arg1);
1366 int count = TYPE_VECTOR_SUBPARTS (type), i;
1367 tree elements1, elements2, list = NULL_TREE;
1369 if(TREE_CODE(arg2) != VECTOR_CST)
1372 elements1 = TREE_VECTOR_CST_ELTS (arg1);
1373 elements2 = TREE_VECTOR_CST_ELTS (arg2);
1375 for (i = 0; i < count; i++)
1377 tree elem1, elem2, elem;
1379 /* The trailing elements can be empty and should be treated as 0 */
1381 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1384 elem1 = TREE_VALUE(elements1);
1385 elements1 = TREE_CHAIN (elements1);
1389 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1392 elem2 = TREE_VALUE(elements2);
1393 elements2 = TREE_CHAIN (elements2);
1396 elem = const_binop (code, elem1, elem2);
1398 /* It is possible that const_binop cannot handle the given
1399 code and return NULL_TREE */
1400 if(elem == NULL_TREE)
1403 list = tree_cons (NULL_TREE, elem, list);
1405 return build_vector(type, nreverse(list));
1410 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1411 indicates which particular sizetype to create. */
1414 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1416 return build_int_cst (sizetype_tab[(int) kind], number);
1419 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1420 is a tree code. The type of the result is taken from the operands.
1421 Both must be equivalent integer types, ala int_binop_types_match_p.
1422 If the operands are constant, so is the result. */
1425 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1427 tree type = TREE_TYPE (arg0);
1429 if (arg0 == error_mark_node || arg1 == error_mark_node)
1430 return error_mark_node;
1432 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1435 /* Handle the special case of two integer constants faster. */
1436 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1438 /* And some specific cases even faster than that. */
1439 if (code == PLUS_EXPR)
1441 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1443 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1446 else if (code == MINUS_EXPR)
1448 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1451 else if (code == MULT_EXPR)
1453 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1457 /* Handle general case of two integer constants. */
1458 return int_const_binop (code, arg0, arg1, 0);
1461 return fold_build2_loc (loc, code, type, arg0, arg1);
1464 /* Given two values, either both of sizetype or both of bitsizetype,
1465 compute the difference between the two values. Return the value
1466 in signed type corresponding to the type of the operands. */
1469 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1471 tree type = TREE_TYPE (arg0);
1474 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1477 /* If the type is already signed, just do the simple thing. */
1478 if (!TYPE_UNSIGNED (type))
1479 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1481 if (type == sizetype)
1483 else if (type == bitsizetype)
1484 ctype = sbitsizetype;
1486 ctype = signed_type_for (type);
1488 /* If either operand is not a constant, do the conversions to the signed
1489 type and subtract. The hardware will do the right thing with any
1490 overflow in the subtraction. */
1491 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1492 return size_binop_loc (loc, MINUS_EXPR,
1493 fold_convert_loc (loc, ctype, arg0),
1494 fold_convert_loc (loc, ctype, arg1));
1496 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1497 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1498 overflow) and negate (which can't either). Special-case a result
1499 of zero while we're here. */
1500 if (tree_int_cst_equal (arg0, arg1))
1501 return build_int_cst (ctype, 0);
1502 else if (tree_int_cst_lt (arg1, arg0))
1503 return fold_convert_loc (loc, ctype,
1504 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1506 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1507 fold_convert_loc (loc, ctype,
1508 size_binop_loc (loc,
1513 /* A subroutine of fold_convert_const handling conversions of an
1514 INTEGER_CST to another integer type. */
1517 fold_convert_const_int_from_int (tree type, const_tree arg1)
1521 /* Given an integer constant, make new constant with new type,
1522 appropriately sign-extended or truncated. */
1523 t = force_fit_type_double (type, tree_to_double_int (arg1),
1524 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1525 (TREE_INT_CST_HIGH (arg1) < 0
1526 && (TYPE_UNSIGNED (type)
1527 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1528 | TREE_OVERFLOW (arg1));
1533 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1534 to an integer type. */
1537 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1542 /* The following code implements the floating point to integer
1543 conversion rules required by the Java Language Specification,
1544 that IEEE NaNs are mapped to zero and values that overflow
1545 the target precision saturate, i.e. values greater than
1546 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1547 are mapped to INT_MIN. These semantics are allowed by the
1548 C and C++ standards that simply state that the behavior of
1549 FP-to-integer conversion is unspecified upon overflow. */
1553 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1557 case FIX_TRUNC_EXPR:
1558 real_trunc (&r, VOIDmode, &x);
1565 /* If R is NaN, return zero and show we have an overflow. */
1566 if (REAL_VALUE_ISNAN (r))
1569 val = double_int_zero;
1572 /* See if R is less than the lower bound or greater than the
1577 tree lt = TYPE_MIN_VALUE (type);
1578 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1579 if (REAL_VALUES_LESS (r, l))
1582 val = tree_to_double_int (lt);
1588 tree ut = TYPE_MAX_VALUE (type);
1591 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1592 if (REAL_VALUES_LESS (u, r))
1595 val = tree_to_double_int (ut);
1601 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1603 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1607 /* A subroutine of fold_convert_const handling conversions of a
1608 FIXED_CST to an integer type. */
1611 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1614 double_int temp, temp_trunc;
1617 /* Right shift FIXED_CST to temp by fbit. */
1618 temp = TREE_FIXED_CST (arg1).data;
1619 mode = TREE_FIXED_CST (arg1).mode;
1620 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
1622 temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
1623 HOST_BITS_PER_DOUBLE_INT,
1624 SIGNED_FIXED_POINT_MODE_P (mode));
1626 /* Left shift temp to temp_trunc by fbit. */
1627 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
1628 HOST_BITS_PER_DOUBLE_INT,
1629 SIGNED_FIXED_POINT_MODE_P (mode));
1633 temp = double_int_zero;
1634 temp_trunc = double_int_zero;
1637 /* If FIXED_CST is negative, we need to round the value toward 0.
1638 By checking if the fractional bits are not zero to add 1 to temp. */
1639 if (SIGNED_FIXED_POINT_MODE_P (mode)
1640 && double_int_negative_p (temp_trunc)
1641 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
1642 temp = double_int_add (temp, double_int_one);
1644 /* Given a fixed-point constant, make new constant with new type,
1645 appropriately sign-extended or truncated. */
1646 t = force_fit_type_double (type, temp, -1,
1647 (double_int_negative_p (temp)
1648 && (TYPE_UNSIGNED (type)
1649 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1650 | TREE_OVERFLOW (arg1));
1655 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1656 to another floating point type. */
1659 fold_convert_const_real_from_real (tree type, const_tree arg1)
1661 REAL_VALUE_TYPE value;
1664 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1665 t = build_real (type, value);
1667 /* If converting an infinity or NAN to a representation that doesn't
1668 have one, set the overflow bit so that we can produce some kind of
1669 error message at the appropriate point if necessary. It's not the
1670 most user-friendly message, but it's better than nothing. */
1671 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1672 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1673 TREE_OVERFLOW (t) = 1;
1674 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1675 && !MODE_HAS_NANS (TYPE_MODE (type)))
1676 TREE_OVERFLOW (t) = 1;
1677 /* Regular overflow, conversion produced an infinity in a mode that
1678 can't represent them. */
1679 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1680 && REAL_VALUE_ISINF (value)
1681 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1682 TREE_OVERFLOW (t) = 1;
1684 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1688 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1689 to a floating point type. */
1692 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1694 REAL_VALUE_TYPE value;
1697 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1698 t = build_real (type, value);
1700 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1704 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1705 to another fixed-point type. */
1708 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1710 FIXED_VALUE_TYPE value;
1714 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1715 TYPE_SATURATING (type));
1716 t = build_fixed (type, value);
1718 /* Propagate overflow flags. */
1719 if (overflow_p | TREE_OVERFLOW (arg1))
1720 TREE_OVERFLOW (t) = 1;
1724 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1725 to a fixed-point type. */
1728 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1730 FIXED_VALUE_TYPE value;
1734 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1735 TREE_INT_CST (arg1),
1736 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1737 TYPE_SATURATING (type));
1738 t = build_fixed (type, value);
1740 /* Propagate overflow flags. */
1741 if (overflow_p | TREE_OVERFLOW (arg1))
1742 TREE_OVERFLOW (t) = 1;
1746 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1747 to a fixed-point type. */
1750 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1752 FIXED_VALUE_TYPE value;
1756 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1757 &TREE_REAL_CST (arg1),
1758 TYPE_SATURATING (type));
1759 t = build_fixed (type, value);
1761 /* Propagate overflow flags. */
1762 if (overflow_p | TREE_OVERFLOW (arg1))
1763 TREE_OVERFLOW (t) = 1;
1767 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1768 type TYPE. If no simplification can be done return NULL_TREE. */
1771 fold_convert_const (enum tree_code code, tree type, tree arg1)
1773 if (TREE_TYPE (arg1) == type)
1776 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1777 || TREE_CODE (type) == OFFSET_TYPE)
1779 if (TREE_CODE (arg1) == INTEGER_CST)
1780 return fold_convert_const_int_from_int (type, arg1);
1781 else if (TREE_CODE (arg1) == REAL_CST)
1782 return fold_convert_const_int_from_real (code, type, arg1);
1783 else if (TREE_CODE (arg1) == FIXED_CST)
1784 return fold_convert_const_int_from_fixed (type, arg1);
1786 else if (TREE_CODE (type) == REAL_TYPE)
1788 if (TREE_CODE (arg1) == INTEGER_CST)
1789 return build_real_from_int_cst (type, arg1);
1790 else if (TREE_CODE (arg1) == REAL_CST)
1791 return fold_convert_const_real_from_real (type, arg1);
1792 else if (TREE_CODE (arg1) == FIXED_CST)
1793 return fold_convert_const_real_from_fixed (type, arg1);
1795 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1797 if (TREE_CODE (arg1) == FIXED_CST)
1798 return fold_convert_const_fixed_from_fixed (type, arg1);
1799 else if (TREE_CODE (arg1) == INTEGER_CST)
1800 return fold_convert_const_fixed_from_int (type, arg1);
1801 else if (TREE_CODE (arg1) == REAL_CST)
1802 return fold_convert_const_fixed_from_real (type, arg1);
1807 /* Construct a vector of zero elements of vector type TYPE. */
1810 build_zero_vector (tree type)
1814 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1815 return build_vector_from_val (type, t);
1818 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1821 fold_convertible_p (const_tree type, const_tree arg)
1823 tree orig = TREE_TYPE (arg);
1828 if (TREE_CODE (arg) == ERROR_MARK
1829 || TREE_CODE (type) == ERROR_MARK
1830 || TREE_CODE (orig) == ERROR_MARK)
1833 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1836 switch (TREE_CODE (type))
1838 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1839 case POINTER_TYPE: case REFERENCE_TYPE:
1841 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1842 || TREE_CODE (orig) == OFFSET_TYPE)
1844 return (TREE_CODE (orig) == VECTOR_TYPE
1845 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1848 case FIXED_POINT_TYPE:
1852 return TREE_CODE (type) == TREE_CODE (orig);
1859 /* Convert expression ARG to type TYPE. Used by the middle-end for
1860 simple conversions in preference to calling the front-end's convert. */
1863 fold_convert_loc (location_t loc, tree type, tree arg)
1865 tree orig = TREE_TYPE (arg);
1871 if (TREE_CODE (arg) == ERROR_MARK
1872 || TREE_CODE (type) == ERROR_MARK
1873 || TREE_CODE (orig) == ERROR_MARK)
1874 return error_mark_node;
1876 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1877 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1879 switch (TREE_CODE (type))
1882 case REFERENCE_TYPE:
1883 /* Handle conversions between pointers to different address spaces. */
1884 if (POINTER_TYPE_P (orig)
1885 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1886 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1887 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1890 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1892 if (TREE_CODE (arg) == INTEGER_CST)
1894 tem = fold_convert_const (NOP_EXPR, type, arg);
1895 if (tem != NULL_TREE)
1898 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1899 || TREE_CODE (orig) == OFFSET_TYPE)
1900 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1901 if (TREE_CODE (orig) == COMPLEX_TYPE)
1902 return fold_convert_loc (loc, type,
1903 fold_build1_loc (loc, REALPART_EXPR,
1904 TREE_TYPE (orig), arg));
1905 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1906 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1907 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1910 if (TREE_CODE (arg) == INTEGER_CST)
1912 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1913 if (tem != NULL_TREE)
1916 else if (TREE_CODE (arg) == REAL_CST)
1918 tem = fold_convert_const (NOP_EXPR, type, arg);
1919 if (tem != NULL_TREE)
1922 else if (TREE_CODE (arg) == FIXED_CST)
1924 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1925 if (tem != NULL_TREE)
1929 switch (TREE_CODE (orig))
1932 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1933 case POINTER_TYPE: case REFERENCE_TYPE:
1934 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1937 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1939 case FIXED_POINT_TYPE:
1940 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1943 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1944 return fold_convert_loc (loc, type, tem);
1950 case FIXED_POINT_TYPE:
1951 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1952 || TREE_CODE (arg) == REAL_CST)
1954 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1955 if (tem != NULL_TREE)
1956 goto fold_convert_exit;
1959 switch (TREE_CODE (orig))
1961 case FIXED_POINT_TYPE:
1966 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1969 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1970 return fold_convert_loc (loc, type, tem);
1977 switch (TREE_CODE (orig))
1980 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1981 case POINTER_TYPE: case REFERENCE_TYPE:
1983 case FIXED_POINT_TYPE:
1984 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1985 fold_convert_loc (loc, TREE_TYPE (type), arg),
1986 fold_convert_loc (loc, TREE_TYPE (type),
1987 integer_zero_node));
1992 if (TREE_CODE (arg) == COMPLEX_EXPR)
1994 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1995 TREE_OPERAND (arg, 0));
1996 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1997 TREE_OPERAND (arg, 1));
1998 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2001 arg = save_expr (arg);
2002 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2003 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2004 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2005 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2006 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2014 if (integer_zerop (arg))
2015 return build_zero_vector (type);
2016 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2017 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2018 || TREE_CODE (orig) == VECTOR_TYPE);
2019 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2022 tem = fold_ignored_result (arg);
2023 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2029 protected_set_expr_location_unshare (tem, loc);
2033 /* Return false if expr can be assumed not to be an lvalue, true
2037 maybe_lvalue_p (const_tree x)
2039 /* We only need to wrap lvalue tree codes. */
2040 switch (TREE_CODE (x))
2053 case ARRAY_RANGE_REF:
2059 case PREINCREMENT_EXPR:
2060 case PREDECREMENT_EXPR:
2062 case TRY_CATCH_EXPR:
2063 case WITH_CLEANUP_EXPR:
2072 /* Assume the worst for front-end tree codes. */
2073 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2081 /* Return an expr equal to X but certainly not valid as an lvalue. */
2084 non_lvalue_loc (location_t loc, tree x)
2086 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2091 if (! maybe_lvalue_p (x))
2093 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2096 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2097 Zero means allow extended lvalues. */
2099 int pedantic_lvalues;
2101 /* When pedantic, return an expr equal to X but certainly not valid as a
2102 pedantic lvalue. Otherwise, return X. */
2105 pedantic_non_lvalue_loc (location_t loc, tree x)
2107 if (pedantic_lvalues)
2108 return non_lvalue_loc (loc, x);
2110 return protected_set_expr_location_unshare (x, loc);
2113 /* Given a tree comparison code, return the code that is the logical inverse
2114 of the given code. It is not safe to do this for floating-point
2115 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2116 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2119 invert_tree_comparison (enum tree_code code, bool honor_nans)
2121 if (honor_nans && flag_trapping_math)
2131 return honor_nans ? UNLE_EXPR : LE_EXPR;
2133 return honor_nans ? UNLT_EXPR : LT_EXPR;
2135 return honor_nans ? UNGE_EXPR : GE_EXPR;
2137 return honor_nans ? UNGT_EXPR : GT_EXPR;
2151 return UNORDERED_EXPR;
2152 case UNORDERED_EXPR:
2153 return ORDERED_EXPR;
2159 /* Similar, but return the comparison that results if the operands are
2160 swapped. This is safe for floating-point. */
2163 swap_tree_comparison (enum tree_code code)
2170 case UNORDERED_EXPR:
2196 /* Convert a comparison tree code from an enum tree_code representation
2197 into a compcode bit-based encoding. This function is the inverse of
2198 compcode_to_comparison. */
2200 static enum comparison_code
2201 comparison_to_compcode (enum tree_code code)
2218 return COMPCODE_ORD;
2219 case UNORDERED_EXPR:
2220 return COMPCODE_UNORD;
2222 return COMPCODE_UNLT;
2224 return COMPCODE_UNEQ;
2226 return COMPCODE_UNLE;
2228 return COMPCODE_UNGT;
2230 return COMPCODE_LTGT;
2232 return COMPCODE_UNGE;
2238 /* Convert a compcode bit-based encoding of a comparison operator back
2239 to GCC's enum tree_code representation. This function is the
2240 inverse of comparison_to_compcode. */
2242 static enum tree_code
2243 compcode_to_comparison (enum comparison_code code)
2260 return ORDERED_EXPR;
2261 case COMPCODE_UNORD:
2262 return UNORDERED_EXPR;
2280 /* Return a tree for the comparison which is the combination of
2281 doing the AND or OR (depending on CODE) of the two operations LCODE
2282 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2283 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2284 if this makes the transformation invalid. */
2287 combine_comparisons (location_t loc,
2288 enum tree_code code, enum tree_code lcode,
2289 enum tree_code rcode, tree truth_type,
2290 tree ll_arg, tree lr_arg)
2292 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2293 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2294 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2299 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2300 compcode = lcompcode & rcompcode;
2303 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2304 compcode = lcompcode | rcompcode;
2313 /* Eliminate unordered comparisons, as well as LTGT and ORD
2314 which are not used unless the mode has NaNs. */
2315 compcode &= ~COMPCODE_UNORD;
2316 if (compcode == COMPCODE_LTGT)
2317 compcode = COMPCODE_NE;
2318 else if (compcode == COMPCODE_ORD)
2319 compcode = COMPCODE_TRUE;
2321 else if (flag_trapping_math)
2323 /* Check that the original operation and the optimized ones will trap
2324 under the same condition. */
2325 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2326 && (lcompcode != COMPCODE_EQ)
2327 && (lcompcode != COMPCODE_ORD);
2328 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2329 && (rcompcode != COMPCODE_EQ)
2330 && (rcompcode != COMPCODE_ORD);
2331 bool trap = (compcode & COMPCODE_UNORD) == 0
2332 && (compcode != COMPCODE_EQ)
2333 && (compcode != COMPCODE_ORD);
2335 /* In a short-circuited boolean expression the LHS might be
2336 such that the RHS, if evaluated, will never trap. For
2337 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2338 if neither x nor y is NaN. (This is a mixed blessing: for
2339 example, the expression above will never trap, hence
2340 optimizing it to x < y would be invalid). */
2341 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2342 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2345 /* If the comparison was short-circuited, and only the RHS
2346 trapped, we may now generate a spurious trap. */
2348 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2351 /* If we changed the conditions that cause a trap, we lose. */
2352 if ((ltrap || rtrap) != trap)
2356 if (compcode == COMPCODE_TRUE)
2357 return constant_boolean_node (true, truth_type);
2358 else if (compcode == COMPCODE_FALSE)
2359 return constant_boolean_node (false, truth_type);
2362 enum tree_code tcode;
2364 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2365 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2369 /* Return nonzero if two operands (typically of the same tree node)
2370 are necessarily equal. If either argument has side-effects this
2371 function returns zero. FLAGS modifies behavior as follows:
2373 If OEP_ONLY_CONST is set, only return nonzero for constants.
2374 This function tests whether the operands are indistinguishable;
2375 it does not test whether they are equal using C's == operation.
2376 The distinction is important for IEEE floating point, because
2377 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2378 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2380 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2381 even though it may hold multiple values during a function.
2382 This is because a GCC tree node guarantees that nothing else is
2383 executed between the evaluation of its "operands" (which may often
2384 be evaluated in arbitrary order). Hence if the operands themselves
2385 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2386 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2387 unset means assuming isochronic (or instantaneous) tree equivalence.
2388 Unless comparing arbitrary expression trees, such as from different
2389 statements, this flag can usually be left unset.
2391 If OEP_PURE_SAME is set, then pure functions with identical arguments
2392 are considered the same. It is used when the caller has other ways
2393 to ensure that global memory is unchanged in between. */
2396 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2398 /* If either is ERROR_MARK, they aren't equal. */
2399 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2400 || TREE_TYPE (arg0) == error_mark_node
2401 || TREE_TYPE (arg1) == error_mark_node)
2404 /* Similar, if either does not have a type (like a released SSA name),
2405 they aren't equal. */
2406 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2409 /* Check equality of integer constants before bailing out due to
2410 precision differences. */
2411 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2412 return tree_int_cst_equal (arg0, arg1);
2414 /* If both types don't have the same signedness, then we can't consider
2415 them equal. We must check this before the STRIP_NOPS calls
2416 because they may change the signedness of the arguments. As pointers
2417 strictly don't have a signedness, require either two pointers or
2418 two non-pointers as well. */
2419 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2420 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2423 /* We cannot consider pointers to different address space equal. */
2424 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2425 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2426 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2429 /* If both types don't have the same precision, then it is not safe
2431 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2437 /* In case both args are comparisons but with different comparison
2438 code, try to swap the comparison operands of one arg to produce
2439 a match and compare that variant. */
2440 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2441 && COMPARISON_CLASS_P (arg0)
2442 && COMPARISON_CLASS_P (arg1))
2444 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2446 if (TREE_CODE (arg0) == swap_code)
2447 return operand_equal_p (TREE_OPERAND (arg0, 0),
2448 TREE_OPERAND (arg1, 1), flags)
2449 && operand_equal_p (TREE_OPERAND (arg0, 1),
2450 TREE_OPERAND (arg1, 0), flags);
2453 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2454 /* This is needed for conversions and for COMPONENT_REF.
2455 Might as well play it safe and always test this. */
2456 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2457 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2458 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2461 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2462 We don't care about side effects in that case because the SAVE_EXPR
2463 takes care of that for us. In all other cases, two expressions are
2464 equal if they have no side effects. If we have two identical
2465 expressions with side effects that should be treated the same due
2466 to the only side effects being identical SAVE_EXPR's, that will
2467 be detected in the recursive calls below. */
2468 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2469 && (TREE_CODE (arg0) == SAVE_EXPR
2470 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2473 /* Next handle constant cases, those for which we can return 1 even
2474 if ONLY_CONST is set. */
2475 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2476 switch (TREE_CODE (arg0))
2479 return tree_int_cst_equal (arg0, arg1);
2482 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2483 TREE_FIXED_CST (arg1));
2486 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2487 TREE_REAL_CST (arg1)))
2491 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2493 /* If we do not distinguish between signed and unsigned zero,
2494 consider them equal. */
2495 if (real_zerop (arg0) && real_zerop (arg1))
2504 v1 = TREE_VECTOR_CST_ELTS (arg0);
2505 v2 = TREE_VECTOR_CST_ELTS (arg1);
2508 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2511 v1 = TREE_CHAIN (v1);
2512 v2 = TREE_CHAIN (v2);
2519 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2521 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2525 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2526 && ! memcmp (TREE_STRING_POINTER (arg0),
2527 TREE_STRING_POINTER (arg1),
2528 TREE_STRING_LENGTH (arg0)));
2531 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2537 if (flags & OEP_ONLY_CONST)
2540 /* Define macros to test an operand from arg0 and arg1 for equality and a
2541 variant that allows null and views null as being different from any
2542 non-null value. In the latter case, if either is null, the both
2543 must be; otherwise, do the normal comparison. */
2544 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2545 TREE_OPERAND (arg1, N), flags)
2547 #define OP_SAME_WITH_NULL(N) \
2548 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2549 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2551 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2554 /* Two conversions are equal only if signedness and modes match. */
2555 switch (TREE_CODE (arg0))
2558 case FIX_TRUNC_EXPR:
2559 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2560 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2570 case tcc_comparison:
2572 if (OP_SAME (0) && OP_SAME (1))
2575 /* For commutative ops, allow the other order. */
2576 return (commutative_tree_code (TREE_CODE (arg0))
2577 && operand_equal_p (TREE_OPERAND (arg0, 0),
2578 TREE_OPERAND (arg1, 1), flags)
2579 && operand_equal_p (TREE_OPERAND (arg0, 1),
2580 TREE_OPERAND (arg1, 0), flags));
2583 /* If either of the pointer (or reference) expressions we are
2584 dereferencing contain a side effect, these cannot be equal. */
2585 if (TREE_SIDE_EFFECTS (arg0)
2586 || TREE_SIDE_EFFECTS (arg1))
2589 switch (TREE_CODE (arg0))
2597 /* Require equal access sizes, and similar pointer types.
2598 We can have incomplete types for array references of
2599 variable-sized arrays from the Fortran frontent
2601 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2602 || (TYPE_SIZE (TREE_TYPE (arg0))
2603 && TYPE_SIZE (TREE_TYPE (arg1))
2604 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2605 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2606 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2607 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2608 && OP_SAME (0) && OP_SAME (1));
2611 case ARRAY_RANGE_REF:
2612 /* Operands 2 and 3 may be null.
2613 Compare the array index by value if it is constant first as we
2614 may have different types but same value here. */
2616 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2617 TREE_OPERAND (arg1, 1))
2619 && OP_SAME_WITH_NULL (2)
2620 && OP_SAME_WITH_NULL (3));
2623 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2624 may be NULL when we're called to compare MEM_EXPRs. */
2625 return OP_SAME_WITH_NULL (0)
2627 && OP_SAME_WITH_NULL (2);
2630 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2636 case tcc_expression:
2637 switch (TREE_CODE (arg0))
2640 case TRUTH_NOT_EXPR:
2643 case TRUTH_ANDIF_EXPR:
2644 case TRUTH_ORIF_EXPR:
2645 return OP_SAME (0) && OP_SAME (1);
2648 case WIDEN_MULT_PLUS_EXPR:
2649 case WIDEN_MULT_MINUS_EXPR:
2652 /* The multiplcation operands are commutative. */
2655 case TRUTH_AND_EXPR:
2657 case TRUTH_XOR_EXPR:
2658 if (OP_SAME (0) && OP_SAME (1))
2661 /* Otherwise take into account this is a commutative operation. */
2662 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2663 TREE_OPERAND (arg1, 1), flags)
2664 && operand_equal_p (TREE_OPERAND (arg0, 1),
2665 TREE_OPERAND (arg1, 0), flags));
2670 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2677 switch (TREE_CODE (arg0))
2680 /* If the CALL_EXPRs call different functions, then they
2681 clearly can not be equal. */
2682 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2687 unsigned int cef = call_expr_flags (arg0);
2688 if (flags & OEP_PURE_SAME)
2689 cef &= ECF_CONST | ECF_PURE;
2696 /* Now see if all the arguments are the same. */
2698 const_call_expr_arg_iterator iter0, iter1;
2700 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2701 a1 = first_const_call_expr_arg (arg1, &iter1);
2703 a0 = next_const_call_expr_arg (&iter0),
2704 a1 = next_const_call_expr_arg (&iter1))
2705 if (! operand_equal_p (a0, a1, flags))
2708 /* If we get here and both argument lists are exhausted
2709 then the CALL_EXPRs are equal. */
2710 return ! (a0 || a1);
2716 case tcc_declaration:
2717 /* Consider __builtin_sqrt equal to sqrt. */
2718 return (TREE_CODE (arg0) == FUNCTION_DECL
2719 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2720 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2721 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2728 #undef OP_SAME_WITH_NULL
2731 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2732 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2734 When in doubt, return 0. */
2737 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2739 int unsignedp1, unsignedpo;
2740 tree primarg0, primarg1, primother;
2741 unsigned int correct_width;
2743 if (operand_equal_p (arg0, arg1, 0))
2746 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2747 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2750 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2751 and see if the inner values are the same. This removes any
2752 signedness comparison, which doesn't matter here. */
2753 primarg0 = arg0, primarg1 = arg1;
2754 STRIP_NOPS (primarg0);
2755 STRIP_NOPS (primarg1);
2756 if (operand_equal_p (primarg0, primarg1, 0))
2759 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2760 actual comparison operand, ARG0.
2762 First throw away any conversions to wider types
2763 already present in the operands. */
2765 primarg1 = get_narrower (arg1, &unsignedp1);
2766 primother = get_narrower (other, &unsignedpo);
2768 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2769 if (unsignedp1 == unsignedpo
2770 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2771 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2773 tree type = TREE_TYPE (arg0);
2775 /* Make sure shorter operand is extended the right way
2776 to match the longer operand. */
2777 primarg1 = fold_convert (signed_or_unsigned_type_for
2778 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2780 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2787 /* See if ARG is an expression that is either a comparison or is performing
2788 arithmetic on comparisons. The comparisons must only be comparing
2789 two different values, which will be stored in *CVAL1 and *CVAL2; if
2790 they are nonzero it means that some operands have already been found.
2791 No variables may be used anywhere else in the expression except in the
2792 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2793 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2795 If this is true, return 1. Otherwise, return zero. */
2798 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2800 enum tree_code code = TREE_CODE (arg);
2801 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2803 /* We can handle some of the tcc_expression cases here. */
2804 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2806 else if (tclass == tcc_expression
2807 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2808 || code == COMPOUND_EXPR))
2809 tclass = tcc_binary;
2811 else if (tclass == tcc_expression && code == SAVE_EXPR
2812 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2814 /* If we've already found a CVAL1 or CVAL2, this expression is
2815 two complex to handle. */
2816 if (*cval1 || *cval2)
2826 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2829 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2830 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2831 cval1, cval2, save_p));
2836 case tcc_expression:
2837 if (code == COND_EXPR)
2838 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2839 cval1, cval2, save_p)
2840 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2841 cval1, cval2, save_p)
2842 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2843 cval1, cval2, save_p));
2846 case tcc_comparison:
2847 /* First see if we can handle the first operand, then the second. For
2848 the second operand, we know *CVAL1 can't be zero. It must be that
2849 one side of the comparison is each of the values; test for the
2850 case where this isn't true by failing if the two operands
2853 if (operand_equal_p (TREE_OPERAND (arg, 0),
2854 TREE_OPERAND (arg, 1), 0))
2858 *cval1 = TREE_OPERAND (arg, 0);
2859 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2861 else if (*cval2 == 0)
2862 *cval2 = TREE_OPERAND (arg, 0);
2863 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2868 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2870 else if (*cval2 == 0)
2871 *cval2 = TREE_OPERAND (arg, 1);
2872 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2884 /* ARG is a tree that is known to contain just arithmetic operations and
2885 comparisons. Evaluate the operations in the tree substituting NEW0 for
2886 any occurrence of OLD0 as an operand of a comparison and likewise for
2890 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2891 tree old1, tree new1)
2893 tree type = TREE_TYPE (arg);
2894 enum tree_code code = TREE_CODE (arg);
2895 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2897 /* We can handle some of the tcc_expression cases here. */
2898 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2900 else if (tclass == tcc_expression
2901 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2902 tclass = tcc_binary;
2907 return fold_build1_loc (loc, code, type,
2908 eval_subst (loc, TREE_OPERAND (arg, 0),
2909 old0, new0, old1, new1));
2912 return fold_build2_loc (loc, code, type,
2913 eval_subst (loc, TREE_OPERAND (arg, 0),
2914 old0, new0, old1, new1),
2915 eval_subst (loc, TREE_OPERAND (arg, 1),
2916 old0, new0, old1, new1));
2918 case tcc_expression:
2922 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2926 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2930 return fold_build3_loc (loc, code, type,
2931 eval_subst (loc, TREE_OPERAND (arg, 0),
2932 old0, new0, old1, new1),
2933 eval_subst (loc, TREE_OPERAND (arg, 1),
2934 old0, new0, old1, new1),
2935 eval_subst (loc, TREE_OPERAND (arg, 2),
2936 old0, new0, old1, new1));
2940 /* Fall through - ??? */
2942 case tcc_comparison:
2944 tree arg0 = TREE_OPERAND (arg, 0);
2945 tree arg1 = TREE_OPERAND (arg, 1);
2947 /* We need to check both for exact equality and tree equality. The
2948 former will be true if the operand has a side-effect. In that
2949 case, we know the operand occurred exactly once. */
2951 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2953 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2956 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2958 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2961 return fold_build2_loc (loc, code, type, arg0, arg1);
2969 /* Return a tree for the case when the result of an expression is RESULT
2970 converted to TYPE and OMITTED was previously an operand of the expression
2971 but is now not needed (e.g., we folded OMITTED * 0).
2973 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2974 the conversion of RESULT to TYPE. */
2977 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2979 tree t = fold_convert_loc (loc, type, result);
2981 /* If the resulting operand is an empty statement, just return the omitted
2982 statement casted to void. */
2983 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2984 return build1_loc (loc, NOP_EXPR, void_type_node,
2985 fold_ignored_result (omitted));
2987 if (TREE_SIDE_EFFECTS (omitted))
2988 return build2_loc (loc, COMPOUND_EXPR, type,
2989 fold_ignored_result (omitted), t);
2991 return non_lvalue_loc (loc, t);
2994 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2997 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3000 tree t = fold_convert_loc (loc, type, result);
3002 /* If the resulting operand is an empty statement, just return the omitted
3003 statement casted to void. */
3004 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3005 return build1_loc (loc, NOP_EXPR, void_type_node,
3006 fold_ignored_result (omitted));
3008 if (TREE_SIDE_EFFECTS (omitted))
3009 return build2_loc (loc, COMPOUND_EXPR, type,
3010 fold_ignored_result (omitted), t);
3012 return pedantic_non_lvalue_loc (loc, t);
3015 /* Return a tree for the case when the result of an expression is RESULT
3016 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3017 of the expression but are now not needed.
3019 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3020 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3021 evaluated before OMITTED2. Otherwise, if neither has side effects,
3022 just do the conversion of RESULT to TYPE. */
3025 omit_two_operands_loc (location_t loc, tree type, tree result,
3026 tree omitted1, tree omitted2)
3028 tree t = fold_convert_loc (loc, type, result);
3030 if (TREE_SIDE_EFFECTS (omitted2))
3031 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3032 if (TREE_SIDE_EFFECTS (omitted1))
3033 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3035 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3039 /* Return a simplified tree node for the truth-negation of ARG. This
3040 never alters ARG itself. We assume that ARG is an operation that
3041 returns a truth value (0 or 1).
3043 FIXME: one would think we would fold the result, but it causes
3044 problems with the dominator optimizer. */
3047 fold_truth_not_expr (location_t loc, tree arg)
3049 tree type = TREE_TYPE (arg);
3050 enum tree_code code = TREE_CODE (arg);
3051 location_t loc1, loc2;
3053 /* If this is a comparison, we can simply invert it, except for
3054 floating-point non-equality comparisons, in which case we just
3055 enclose a TRUTH_NOT_EXPR around what we have. */
3057 if (TREE_CODE_CLASS (code) == tcc_comparison)
3059 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3060 if (FLOAT_TYPE_P (op_type)
3061 && flag_trapping_math
3062 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3063 && code != NE_EXPR && code != EQ_EXPR)
3066 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3067 if (code == ERROR_MARK)
3070 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3071 TREE_OPERAND (arg, 1));
3077 return constant_boolean_node (integer_zerop (arg), type);
3079 case TRUTH_AND_EXPR:
3080 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3081 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3082 if (loc1 == UNKNOWN_LOCATION)
3084 if (loc2 == UNKNOWN_LOCATION)
3086 return build2_loc (loc, TRUTH_OR_EXPR, type,
3087 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3088 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3091 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3092 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3093 if (loc1 == UNKNOWN_LOCATION)
3095 if (loc2 == UNKNOWN_LOCATION)
3097 return build2_loc (loc, TRUTH_AND_EXPR, type,
3098 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3099 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3101 case TRUTH_XOR_EXPR:
3102 /* Here we can invert either operand. We invert the first operand
3103 unless the second operand is a TRUTH_NOT_EXPR in which case our
3104 result is the XOR of the first operand with the inside of the
3105 negation of the second operand. */
3107 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3108 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3109 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3111 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3112 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3113 TREE_OPERAND (arg, 1));
3115 case TRUTH_ANDIF_EXPR:
3116 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3117 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3118 if (loc1 == UNKNOWN_LOCATION)
3120 if (loc2 == UNKNOWN_LOCATION)
3122 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3123 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3124 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3126 case TRUTH_ORIF_EXPR:
3127 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3128 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3129 if (loc1 == UNKNOWN_LOCATION)
3131 if (loc2 == UNKNOWN_LOCATION)
3133 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3134 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3135 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3137 case TRUTH_NOT_EXPR:
3138 return TREE_OPERAND (arg, 0);
3142 tree arg1 = TREE_OPERAND (arg, 1);
3143 tree arg2 = TREE_OPERAND (arg, 2);
3145 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3146 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3147 if (loc1 == UNKNOWN_LOCATION)
3149 if (loc2 == UNKNOWN_LOCATION)
3152 /* A COND_EXPR may have a throw as one operand, which
3153 then has void type. Just leave void operands
3155 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3156 VOID_TYPE_P (TREE_TYPE (arg1))
3157 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3158 VOID_TYPE_P (TREE_TYPE (arg2))
3159 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3163 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3164 if (loc1 == UNKNOWN_LOCATION)
3166 return build2_loc (loc, COMPOUND_EXPR, type,
3167 TREE_OPERAND (arg, 0),
3168 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3170 case NON_LVALUE_EXPR:
3171 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3172 if (loc1 == UNKNOWN_LOCATION)
3174 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3177 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3178 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3180 /* ... fall through ... */
3183 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3184 if (loc1 == UNKNOWN_LOCATION)
3186 return build1_loc (loc, TREE_CODE (arg), type,
3187 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3190 if (!integer_onep (TREE_OPERAND (arg, 1)))
3192 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3195 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3197 case CLEANUP_POINT_EXPR:
3198 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3199 if (loc1 == UNKNOWN_LOCATION)
3201 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3202 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3209 /* Return a simplified tree node for the truth-negation of ARG. This
3210 never alters ARG itself. We assume that ARG is an operation that
3211 returns a truth value (0 or 1).
3213 FIXME: one would think we would fold the result, but it causes
3214 problems with the dominator optimizer. */
3217 invert_truthvalue_loc (location_t loc, tree arg)
3221 if (TREE_CODE (arg) == ERROR_MARK)
3224 tem = fold_truth_not_expr (loc, arg);
3226 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3231 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3232 operands are another bit-wise operation with a common input. If so,
3233 distribute the bit operations to save an operation and possibly two if
3234 constants are involved. For example, convert
3235 (A | B) & (A | C) into A | (B & C)
3236 Further simplification will occur if B and C are constants.
3238 If this optimization cannot be done, 0 will be returned. */
3241 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3242 tree arg0, tree arg1)
3247 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3248 || TREE_CODE (arg0) == code
3249 || (TREE_CODE (arg0) != BIT_AND_EXPR
3250 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3253 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3255 common = TREE_OPERAND (arg0, 0);
3256 left = TREE_OPERAND (arg0, 1);
3257 right = TREE_OPERAND (arg1, 1);
3259 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3261 common = TREE_OPERAND (arg0, 0);
3262 left = TREE_OPERAND (arg0, 1);
3263 right = TREE_OPERAND (arg1, 0);
3265 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3267 common = TREE_OPERAND (arg0, 1);
3268 left = TREE_OPERAND (arg0, 0);
3269 right = TREE_OPERAND (arg1, 1);
3271 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3273 common = TREE_OPERAND (arg0, 1);
3274 left = TREE_OPERAND (arg0, 0);
3275 right = TREE_OPERAND (arg1, 0);
3280 common = fold_convert_loc (loc, type, common);
3281 left = fold_convert_loc (loc, type, left);
3282 right = fold_convert_loc (loc, type, right);
3283 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3284 fold_build2_loc (loc, code, type, left, right));
3287 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3288 with code CODE. This optimization is unsafe. */
3290 distribute_real_division (location_t loc, enum tree_code code, tree type,
3291 tree arg0, tree arg1)
3293 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3294 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3296 /* (A / C) +- (B / C) -> (A +- B) / C. */
3298 && operand_equal_p (TREE_OPERAND (arg0, 1),
3299 TREE_OPERAND (arg1, 1), 0))
3300 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3301 fold_build2_loc (loc, code, type,
3302 TREE_OPERAND (arg0, 0),
3303 TREE_OPERAND (arg1, 0)),
3304 TREE_OPERAND (arg0, 1));
3306 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3307 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3308 TREE_OPERAND (arg1, 0), 0)
3309 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3310 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3312 REAL_VALUE_TYPE r0, r1;
3313 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3314 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3316 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3318 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3319 real_arithmetic (&r0, code, &r0, &r1);
3320 return fold_build2_loc (loc, MULT_EXPR, type,
3321 TREE_OPERAND (arg0, 0),
3322 build_real (type, r0));
3328 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3329 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3332 make_bit_field_ref (location_t loc, tree inner, tree type,
3333 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3335 tree result, bftype;
3339 tree size = TYPE_SIZE (TREE_TYPE (inner));
3340 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3341 || POINTER_TYPE_P (TREE_TYPE (inner)))
3342 && host_integerp (size, 0)
3343 && tree_low_cst (size, 0) == bitsize)
3344 return fold_convert_loc (loc, type, inner);
3348 if (TYPE_PRECISION (bftype) != bitsize
3349 || TYPE_UNSIGNED (bftype) == !unsignedp)
3350 bftype = build_nonstandard_integer_type (bitsize, 0);
3352 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3353 size_int (bitsize), bitsize_int (bitpos));
3356 result = fold_convert_loc (loc, type, result);
3361 /* Optimize a bit-field compare.
3363 There are two cases: First is a compare against a constant and the
3364 second is a comparison of two items where the fields are at the same
3365 bit position relative to the start of a chunk (byte, halfword, word)
3366 large enough to contain it. In these cases we can avoid the shift
3367 implicit in bitfield extractions.
3369 For constants, we emit a compare of the shifted constant with the
3370 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3371 compared. For two fields at the same position, we do the ANDs with the
3372 similar mask and compare the result of the ANDs.
3374 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3375 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3376 are the left and right operands of the comparison, respectively.
3378 If the optimization described above can be done, we return the resulting
3379 tree. Otherwise we return zero. */
3382 optimize_bit_field_compare (location_t loc, enum tree_code code,
3383 tree compare_type, tree lhs, tree rhs)
3385 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3386 tree type = TREE_TYPE (lhs);
3387 tree signed_type, unsigned_type;
3388 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3389 enum machine_mode lmode, rmode, nmode;
3390 int lunsignedp, runsignedp;
3391 int lvolatilep = 0, rvolatilep = 0;
3392 tree linner, rinner = NULL_TREE;
3396 /* Get all the information about the extractions being done. If the bit size
3397 if the same as the size of the underlying object, we aren't doing an
3398 extraction at all and so can do nothing. We also don't want to
3399 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3400 then will no longer be able to replace it. */
3401 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3402 &lunsignedp, &lvolatilep, false);
3403 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3404 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3409 /* If this is not a constant, we can only do something if bit positions,
3410 sizes, and signedness are the same. */
3411 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3412 &runsignedp, &rvolatilep, false);
3414 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3415 || lunsignedp != runsignedp || offset != 0
3416 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3420 /* See if we can find a mode to refer to this field. We should be able to,
3421 but fail if we can't. */
3423 && GET_MODE_BITSIZE (lmode) > 0
3424 && flag_strict_volatile_bitfields > 0)
3427 nmode = get_best_mode (lbitsize, lbitpos,
3428 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3429 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3430 TYPE_ALIGN (TREE_TYPE (rinner))),
3431 word_mode, lvolatilep || rvolatilep);
3432 if (nmode == VOIDmode)
3435 /* Set signed and unsigned types of the precision of this mode for the
3437 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3438 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3440 /* Compute the bit position and size for the new reference and our offset
3441 within it. If the new reference is the same size as the original, we
3442 won't optimize anything, so return zero. */
3443 nbitsize = GET_MODE_BITSIZE (nmode);
3444 nbitpos = lbitpos & ~ (nbitsize - 1);
3446 if (nbitsize == lbitsize)
3449 if (BYTES_BIG_ENDIAN)
3450 lbitpos = nbitsize - lbitsize - lbitpos;
3452 /* Make the mask to be used against the extracted field. */
3453 mask = build_int_cst_type (unsigned_type, -1);
3454 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3455 mask = const_binop (RSHIFT_EXPR, mask,
3456 size_int (nbitsize - lbitsize - lbitpos));
3459 /* If not comparing with constant, just rework the comparison
3461 return fold_build2_loc (loc, code, compare_type,
3462 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3463 make_bit_field_ref (loc, linner,
3468 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3469 make_bit_field_ref (loc, rinner,
3475 /* Otherwise, we are handling the constant case. See if the constant is too
3476 big for the field. Warn and return a tree of for 0 (false) if so. We do
3477 this not only for its own sake, but to avoid having to test for this
3478 error case below. If we didn't, we might generate wrong code.
3480 For unsigned fields, the constant shifted right by the field length should
3481 be all zero. For signed fields, the high-order bits should agree with
3486 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3487 fold_convert_loc (loc,
3488 unsigned_type, rhs),
3489 size_int (lbitsize))))
3491 warning (0, "comparison is always %d due to width of bit-field",
3493 return constant_boolean_node (code == NE_EXPR, compare_type);
3498 tree tem = const_binop (RSHIFT_EXPR,
3499 fold_convert_loc (loc, signed_type, rhs),
3500 size_int (lbitsize - 1));
3501 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3503 warning (0, "comparison is always %d due to width of bit-field",
3505 return constant_boolean_node (code == NE_EXPR, compare_type);
3509 /* Single-bit compares should always be against zero. */
3510 if (lbitsize == 1 && ! integer_zerop (rhs))
3512 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3513 rhs = build_int_cst (type, 0);
3516 /* Make a new bitfield reference, shift the constant over the
3517 appropriate number of bits and mask it with the computed mask
3518 (in case this was a signed field). If we changed it, make a new one. */
3519 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3522 TREE_SIDE_EFFECTS (lhs) = 1;
3523 TREE_THIS_VOLATILE (lhs) = 1;
3526 rhs = const_binop (BIT_AND_EXPR,
3527 const_binop (LSHIFT_EXPR,
3528 fold_convert_loc (loc, unsigned_type, rhs),
3529 size_int (lbitpos)),
3532 lhs = build2_loc (loc, code, compare_type,
3533 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3537 /* Subroutine for fold_truthop: decode a field reference.
3539 If EXP is a comparison reference, we return the innermost reference.
3541 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3542 set to the starting bit number.
3544 If the innermost field can be completely contained in a mode-sized
3545 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3547 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3548 otherwise it is not changed.
3550 *PUNSIGNEDP is set to the signedness of the field.
3552 *PMASK is set to the mask used. This is either contained in a
3553 BIT_AND_EXPR or derived from the width of the field.
3555 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3557 Return 0 if this is not a component reference or is one that we can't
3558 do anything with. */
3561 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3562 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3563 int *punsignedp, int *pvolatilep,
3564 tree *pmask, tree *pand_mask)
3566 tree outer_type = 0;
3568 tree mask, inner, offset;
3570 unsigned int precision;
3572 /* All the optimizations using this function assume integer fields.
3573 There are problems with FP fields since the type_for_size call
3574 below can fail for, e.g., XFmode. */
3575 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3578 /* We are interested in the bare arrangement of bits, so strip everything
3579 that doesn't affect the machine mode. However, record the type of the
3580 outermost expression if it may matter below. */
3581 if (CONVERT_EXPR_P (exp)
3582 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3583 outer_type = TREE_TYPE (exp);
3586 if (TREE_CODE (exp) == BIT_AND_EXPR)
3588 and_mask = TREE_OPERAND (exp, 1);
3589 exp = TREE_OPERAND (exp, 0);
3590 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3591 if (TREE_CODE (and_mask) != INTEGER_CST)
3595 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3596 punsignedp, pvolatilep, false);
3597 if ((inner == exp && and_mask == 0)
3598 || *pbitsize < 0 || offset != 0
3599 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3602 /* If the number of bits in the reference is the same as the bitsize of
3603 the outer type, then the outer type gives the signedness. Otherwise
3604 (in case of a small bitfield) the signedness is unchanged. */
3605 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3606 *punsignedp = TYPE_UNSIGNED (outer_type);
3608 /* Compute the mask to access the bitfield. */
3609 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3610 precision = TYPE_PRECISION (unsigned_type);
3612 mask = build_int_cst_type (unsigned_type, -1);
3614 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3615 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3617 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3619 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3620 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3623 *pand_mask = and_mask;
3627 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3631 all_ones_mask_p (const_tree mask, int size)
3633 tree type = TREE_TYPE (mask);
3634 unsigned int precision = TYPE_PRECISION (type);
3637 tmask = build_int_cst_type (signed_type_for (type), -1);
3640 tree_int_cst_equal (mask,
3641 const_binop (RSHIFT_EXPR,
3642 const_binop (LSHIFT_EXPR, tmask,
3643 size_int (precision - size)),
3644 size_int (precision - size)));
3647 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3648 represents the sign bit of EXP's type. If EXP represents a sign
3649 or zero extension, also test VAL against the unextended type.
3650 The return value is the (sub)expression whose sign bit is VAL,
3651 or NULL_TREE otherwise. */
3654 sign_bit_p (tree exp, const_tree val)
3656 unsigned HOST_WIDE_INT mask_lo, lo;
3657 HOST_WIDE_INT mask_hi, hi;
3661 /* Tree EXP must have an integral type. */
3662 t = TREE_TYPE (exp);
3663 if (! INTEGRAL_TYPE_P (t))
3666 /* Tree VAL must be an integer constant. */
3667 if (TREE_CODE (val) != INTEGER_CST
3668 || TREE_OVERFLOW (val))
3671 width = TYPE_PRECISION (t);
3672 if (width > HOST_BITS_PER_WIDE_INT)
3674 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3677 mask_hi = ((unsigned HOST_WIDE_INT) -1
3678 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3684 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3687 mask_lo = ((unsigned HOST_WIDE_INT) -1
3688 >> (HOST_BITS_PER_WIDE_INT - width));
3691 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3692 treat VAL as if it were unsigned. */
3693 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3694 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3697 /* Handle extension from a narrower type. */
3698 if (TREE_CODE (exp) == NOP_EXPR
3699 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3700 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3705 /* Subroutine for fold_truthop: determine if an operand is simple enough
3706 to be evaluated unconditionally. */
3709 simple_operand_p (const_tree exp)
3711 /* Strip any conversions that don't change the machine mode. */
3714 return (CONSTANT_CLASS_P (exp)
3715 || TREE_CODE (exp) == SSA_NAME
3717 && ! TREE_ADDRESSABLE (exp)
3718 && ! TREE_THIS_VOLATILE (exp)
3719 && ! DECL_NONLOCAL (exp)
3720 /* Don't regard global variables as simple. They may be
3721 allocated in ways unknown to the compiler (shared memory,
3722 #pragma weak, etc). */
3723 && ! TREE_PUBLIC (exp)
3724 && ! DECL_EXTERNAL (exp)
3725 /* Loading a static variable is unduly expensive, but global
3726 registers aren't expensive. */
3727 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3730 /* The following functions are subroutines to fold_range_test and allow it to
3731 try to change a logical combination of comparisons into a range test.
3734 X == 2 || X == 3 || X == 4 || X == 5
3738 (unsigned) (X - 2) <= 3
3740 We describe each set of comparisons as being either inside or outside
3741 a range, using a variable named like IN_P, and then describe the
3742 range with a lower and upper bound. If one of the bounds is omitted,
3743 it represents either the highest or lowest value of the type.
3745 In the comments below, we represent a range by two numbers in brackets
3746 preceded by a "+" to designate being inside that range, or a "-" to
3747 designate being outside that range, so the condition can be inverted by
3748 flipping the prefix. An omitted bound is represented by a "-". For
3749 example, "- [-, 10]" means being outside the range starting at the lowest
3750 possible value and ending at 10, in other words, being greater than 10.
3751 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3754 We set up things so that the missing bounds are handled in a consistent
3755 manner so neither a missing bound nor "true" and "false" need to be
3756 handled using a special case. */
3758 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3759 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3760 and UPPER1_P are nonzero if the respective argument is an upper bound
3761 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3762 must be specified for a comparison. ARG1 will be converted to ARG0's
3763 type if both are specified. */
3766 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3767 tree arg1, int upper1_p)
3773 /* If neither arg represents infinity, do the normal operation.
3774 Else, if not a comparison, return infinity. Else handle the special
3775 comparison rules. Note that most of the cases below won't occur, but
3776 are handled for consistency. */
3778 if (arg0 != 0 && arg1 != 0)
3780 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3781 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3783 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3786 if (TREE_CODE_CLASS (code) != tcc_comparison)
3789 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3790 for neither. In real maths, we cannot assume open ended ranges are
3791 the same. But, this is computer arithmetic, where numbers are finite.
3792 We can therefore make the transformation of any unbounded range with
3793 the value Z, Z being greater than any representable number. This permits
3794 us to treat unbounded ranges as equal. */
3795 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3796 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3800 result = sgn0 == sgn1;
3803 result = sgn0 != sgn1;
3806 result = sgn0 < sgn1;
3809 result = sgn0 <= sgn1;
3812 result = sgn0 > sgn1;
3815 result = sgn0 >= sgn1;
3821 return constant_boolean_node (result, type);
3824 /* Given EXP, a logical expression, set the range it is testing into
3825 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3826 actually being tested. *PLOW and *PHIGH will be made of the same
3827 type as the returned expression. If EXP is not a comparison, we
3828 will most likely not be returning a useful value and range. Set
3829 *STRICT_OVERFLOW_P to true if the return value is only valid
3830 because signed overflow is undefined; otherwise, do not change
3831 *STRICT_OVERFLOW_P. */
3834 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3835 bool *strict_overflow_p)
3837 enum tree_code code;
3838 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3839 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3841 tree low, high, n_low, n_high;
3842 location_t loc = EXPR_LOCATION (exp);
3844 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3845 and see if we can refine the range. Some of the cases below may not
3846 happen, but it doesn't seem worth worrying about this. We "continue"
3847 the outer loop when we've changed something; otherwise we "break"
3848 the switch, which will "break" the while. */
3851 low = high = build_int_cst (TREE_TYPE (exp), 0);
3855 code = TREE_CODE (exp);
3856 exp_type = TREE_TYPE (exp);
3858 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3860 if (TREE_OPERAND_LENGTH (exp) > 0)
3861 arg0 = TREE_OPERAND (exp, 0);
3862 if (TREE_CODE_CLASS (code) == tcc_comparison
3863 || TREE_CODE_CLASS (code) == tcc_unary
3864 || TREE_CODE_CLASS (code) == tcc_binary)
3865 arg0_type = TREE_TYPE (arg0);
3866 if (TREE_CODE_CLASS (code) == tcc_binary
3867 || TREE_CODE_CLASS (code) == tcc_comparison
3868 || (TREE_CODE_CLASS (code) == tcc_expression
3869 && TREE_OPERAND_LENGTH (exp) > 1))
3870 arg1 = TREE_OPERAND (exp, 1);
3875 case TRUTH_NOT_EXPR:
3876 in_p = ! in_p, exp = arg0;
3879 case EQ_EXPR: case NE_EXPR:
3880 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3881 /* We can only do something if the range is testing for zero
3882 and if the second operand is an integer constant. Note that
3883 saying something is "in" the range we make is done by
3884 complementing IN_P since it will set in the initial case of
3885 being not equal to zero; "out" is leaving it alone. */
3886 if (low == 0 || high == 0
3887 || ! integer_zerop (low) || ! integer_zerop (high)
3888 || TREE_CODE (arg1) != INTEGER_CST)
3893 case NE_EXPR: /* - [c, c] */
3896 case EQ_EXPR: /* + [c, c] */
3897 in_p = ! in_p, low = high = arg1;
3899 case GT_EXPR: /* - [-, c] */
3900 low = 0, high = arg1;
3902 case GE_EXPR: /* + [c, -] */
3903 in_p = ! in_p, low = arg1, high = 0;
3905 case LT_EXPR: /* - [c, -] */
3906 low = arg1, high = 0;
3908 case LE_EXPR: /* + [-, c] */
3909 in_p = ! in_p, low = 0, high = arg1;
3915 /* If this is an unsigned comparison, we also know that EXP is
3916 greater than or equal to zero. We base the range tests we make
3917 on that fact, so we record it here so we can parse existing
3918 range tests. We test arg0_type since often the return type
3919 of, e.g. EQ_EXPR, is boolean. */
3920 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3922 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3924 build_int_cst (arg0_type, 0),
3928 in_p = n_in_p, low = n_low, high = n_high;
3930 /* If the high bound is missing, but we have a nonzero low
3931 bound, reverse the range so it goes from zero to the low bound
3933 if (high == 0 && low && ! integer_zerop (low))
3936 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3937 integer_one_node, 0);
3938 low = build_int_cst (arg0_type, 0);
3946 /* (-x) IN [a,b] -> x in [-b, -a] */
3947 n_low = range_binop (MINUS_EXPR, exp_type,
3948 build_int_cst (exp_type, 0),
3950 n_high = range_binop (MINUS_EXPR, exp_type,
3951 build_int_cst (exp_type, 0),
3953 if (n_high != 0 && TREE_OVERFLOW (n_high))
3959 exp = build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3960 build_int_cst (exp_type, 1));
3963 case PLUS_EXPR: case MINUS_EXPR:
3964 if (TREE_CODE (arg1) != INTEGER_CST)
3967 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3968 move a constant to the other side. */
3969 if (!TYPE_UNSIGNED (arg0_type)
3970 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3973 /* If EXP is signed, any overflow in the computation is undefined,
3974 so we don't worry about it so long as our computations on
3975 the bounds don't overflow. For unsigned, overflow is defined
3976 and this is exactly the right thing. */
3977 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3978 arg0_type, low, 0, arg1, 0);
3979 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3980 arg0_type, high, 1, arg1, 0);
3981 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3982 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3985 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3986 *strict_overflow_p = true;
3989 /* Check for an unsigned range which has wrapped around the maximum
3990 value thus making n_high < n_low, and normalize it. */
3991 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3993 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3994 integer_one_node, 0);
3995 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3996 integer_one_node, 0);
3998 /* If the range is of the form +/- [ x+1, x ], we won't
3999 be able to normalize it. But then, it represents the
4000 whole range or the empty set, so make it
4002 if (tree_int_cst_equal (n_low, low)
4003 && tree_int_cst_equal (n_high, high))
4009 low = n_low, high = n_high;
4014 CASE_CONVERT: case NON_LVALUE_EXPR:
4015 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4018 if (! INTEGRAL_TYPE_P (arg0_type)
4019 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4020 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4023 n_low = low, n_high = high;
4026 n_low = fold_convert_loc (loc, arg0_type, n_low);
4029 n_high = fold_convert_loc (loc, arg0_type, n_high);
4032 /* If we're converting arg0 from an unsigned type, to exp,
4033 a signed type, we will be doing the comparison as unsigned.
4034 The tests above have already verified that LOW and HIGH
4037 So we have to ensure that we will handle large unsigned
4038 values the same way that the current signed bounds treat
4041 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4045 /* For fixed-point modes, we need to pass the saturating flag
4046 as the 2nd parameter. */
4047 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4048 equiv_type = lang_hooks.types.type_for_mode
4049 (TYPE_MODE (arg0_type),
4050 TYPE_SATURATING (arg0_type));
4052 equiv_type = lang_hooks.types.type_for_mode
4053 (TYPE_MODE (arg0_type), 1);
4055 /* A range without an upper bound is, naturally, unbounded.
4056 Since convert would have cropped a very large value, use
4057 the max value for the destination type. */
4059 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4060 : TYPE_MAX_VALUE (arg0_type);
4062 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4063 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4064 fold_convert_loc (loc, arg0_type,
4066 build_int_cst (arg0_type, 1));
4068 /* If the low bound is specified, "and" the range with the
4069 range for which the original unsigned value will be
4073 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4074 1, n_low, n_high, 1,
4075 fold_convert_loc (loc, arg0_type,
4080 in_p = (n_in_p == in_p);
4084 /* Otherwise, "or" the range with the range of the input
4085 that will be interpreted as negative. */
4086 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4087 0, n_low, n_high, 1,
4088 fold_convert_loc (loc, arg0_type,
4093 in_p = (in_p != n_in_p);
4098 low = n_low, high = n_high;
4108 /* If EXP is a constant, we can evaluate whether this is true or false. */
4109 if (TREE_CODE (exp) == INTEGER_CST)
4111 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4113 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4119 *pin_p = in_p, *plow = low, *phigh = high;
4123 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4124 type, TYPE, return an expression to test if EXP is in (or out of, depending
4125 on IN_P) the range. Return 0 if the test couldn't be created. */
4128 build_range_check (location_t loc, tree type, tree exp, int in_p,
4129 tree low, tree high)
4131 tree etype = TREE_TYPE (exp), value;
4133 #ifdef HAVE_canonicalize_funcptr_for_compare
4134 /* Disable this optimization for function pointer expressions
4135 on targets that require function pointer canonicalization. */
4136 if (HAVE_canonicalize_funcptr_for_compare
4137 && TREE_CODE (etype) == POINTER_TYPE
4138 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4144 value = build_range_check (loc, type, exp, 1, low, high);
4146 return invert_truthvalue_loc (loc, value);
4151 if (low == 0 && high == 0)
4152 return build_int_cst (type, 1);
4155 return fold_build2_loc (loc, LE_EXPR, type, exp,
4156 fold_convert_loc (loc, etype, high));
4159 return fold_build2_loc (loc, GE_EXPR, type, exp,
4160 fold_convert_loc (loc, etype, low));
4162 if (operand_equal_p (low, high, 0))
4163 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4164 fold_convert_loc (loc, etype, low));
4166 if (integer_zerop (low))
4168 if (! TYPE_UNSIGNED (etype))
4170 etype = unsigned_type_for (etype);
4171 high = fold_convert_loc (loc, etype, high);
4172 exp = fold_convert_loc (loc, etype, exp);
4174 return build_range_check (loc, type, exp, 1, 0, high);
4177 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4178 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4180 unsigned HOST_WIDE_INT lo;
4184 prec = TYPE_PRECISION (etype);
4185 if (prec <= HOST_BITS_PER_WIDE_INT)
4188 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4192 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4193 lo = (unsigned HOST_WIDE_INT) -1;
4196 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4198 if (TYPE_UNSIGNED (etype))
4200 tree signed_etype = signed_type_for (etype);
4201 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4203 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4205 etype = signed_etype;
4206 exp = fold_convert_loc (loc, etype, exp);
4208 return fold_build2_loc (loc, GT_EXPR, type, exp,
4209 build_int_cst (etype, 0));
4213 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4214 This requires wrap-around arithmetics for the type of the expression.
4215 First make sure that arithmetics in this type is valid, then make sure
4216 that it wraps around. */
4217 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4218 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4219 TYPE_UNSIGNED (etype));
4221 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4223 tree utype, minv, maxv;
4225 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4226 for the type in question, as we rely on this here. */
4227 utype = unsigned_type_for (etype);
4228 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4229 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4230 integer_one_node, 1);
4231 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4233 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4240 high = fold_convert_loc (loc, etype, high);
4241 low = fold_convert_loc (loc, etype, low);
4242 exp = fold_convert_loc (loc, etype, exp);
4244 value = const_binop (MINUS_EXPR, high, low);
4247 if (POINTER_TYPE_P (etype))
4249 if (value != 0 && !TREE_OVERFLOW (value))
4251 low = fold_convert_loc (loc, sizetype, low);
4252 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4253 return build_range_check (loc, type,
4254 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4256 1, build_int_cst (etype, 0), value);
4261 if (value != 0 && !TREE_OVERFLOW (value))
4262 return build_range_check (loc, type,
4263 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4264 1, build_int_cst (etype, 0), value);
4269 /* Return the predecessor of VAL in its type, handling the infinite case. */
4272 range_predecessor (tree val)
4274 tree type = TREE_TYPE (val);
4276 if (INTEGRAL_TYPE_P (type)
4277 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4280 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4283 /* Return the successor of VAL in its type, handling the infinite case. */
4286 range_successor (tree val)
4288 tree type = TREE_TYPE (val);
4290 if (INTEGRAL_TYPE_P (type)
4291 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4294 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4297 /* Given two ranges, see if we can merge them into one. Return 1 if we
4298 can, 0 if we can't. Set the output range into the specified parameters. */
4301 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4302 tree high0, int in1_p, tree low1, tree high1)
4310 int lowequal = ((low0 == 0 && low1 == 0)
4311 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4312 low0, 0, low1, 0)));
4313 int highequal = ((high0 == 0 && high1 == 0)
4314 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4315 high0, 1, high1, 1)));
4317 /* Make range 0 be the range that starts first, or ends last if they
4318 start at the same value. Swap them if it isn't. */
4319 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4322 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4323 high1, 1, high0, 1))))
4325 temp = in0_p, in0_p = in1_p, in1_p = temp;
4326 tem = low0, low0 = low1, low1 = tem;
4327 tem = high0, high0 = high1, high1 = tem;
4330 /* Now flag two cases, whether the ranges are disjoint or whether the
4331 second range is totally subsumed in the first. Note that the tests
4332 below are simplified by the ones above. */
4333 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4334 high0, 1, low1, 0));
4335 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4336 high1, 1, high0, 1));
4338 /* We now have four cases, depending on whether we are including or
4339 excluding the two ranges. */
4342 /* If they don't overlap, the result is false. If the second range
4343 is a subset it is the result. Otherwise, the range is from the start
4344 of the second to the end of the first. */
4346 in_p = 0, low = high = 0;
4348 in_p = 1, low = low1, high = high1;
4350 in_p = 1, low = low1, high = high0;
4353 else if (in0_p && ! in1_p)
4355 /* If they don't overlap, the result is the first range. If they are
4356 equal, the result is false. If the second range is a subset of the
4357 first, and the ranges begin at the same place, we go from just after
4358 the end of the second range to the end of the first. If the second
4359 range is not a subset of the first, or if it is a subset and both
4360 ranges end at the same place, the range starts at the start of the
4361 first range and ends just before the second range.
4362 Otherwise, we can't describe this as a single range. */
4364 in_p = 1, low = low0, high = high0;
4365 else if (lowequal && highequal)
4366 in_p = 0, low = high = 0;
4367 else if (subset && lowequal)
4369 low = range_successor (high1);
4374 /* We are in the weird situation where high0 > high1 but
4375 high1 has no successor. Punt. */
4379 else if (! subset || highequal)
4382 high = range_predecessor (low1);
4386 /* low0 < low1 but low1 has no predecessor. Punt. */
4394 else if (! in0_p && in1_p)
4396 /* If they don't overlap, the result is the second range. If the second
4397 is a subset of the first, the result is false. Otherwise,
4398 the range starts just after the first range and ends at the
4399 end of the second. */
4401 in_p = 1, low = low1, high = high1;
4402 else if (subset || highequal)
4403 in_p = 0, low = high = 0;
4406 low = range_successor (high0);
4411 /* high1 > high0 but high0 has no successor. Punt. */
4419 /* The case where we are excluding both ranges. Here the complex case
4420 is if they don't overlap. In that case, the only time we have a
4421 range is if they are adjacent. If the second is a subset of the
4422 first, the result is the first. Otherwise, the range to exclude
4423 starts at the beginning of the first range and ends at the end of the
4427 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4428 range_successor (high0),
4430 in_p = 0, low = low0, high = high1;
4433 /* Canonicalize - [min, x] into - [-, x]. */
4434 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4435 switch (TREE_CODE (TREE_TYPE (low0)))
4438 if (TYPE_PRECISION (TREE_TYPE (low0))
4439 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4443 if (tree_int_cst_equal (low0,
4444 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4448 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4449 && integer_zerop (low0))
4456 /* Canonicalize - [x, max] into - [x, -]. */
4457 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4458 switch (TREE_CODE (TREE_TYPE (high1)))
4461 if (TYPE_PRECISION (TREE_TYPE (high1))
4462 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4466 if (tree_int_cst_equal (high1,
4467 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4471 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4472 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4474 integer_one_node, 1)))
4481 /* The ranges might be also adjacent between the maximum and
4482 minimum values of the given type. For
4483 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4484 return + [x + 1, y - 1]. */
4485 if (low0 == 0 && high1 == 0)
4487 low = range_successor (high0);
4488 high = range_predecessor (low1);
4489 if (low == 0 || high == 0)
4499 in_p = 0, low = low0, high = high0;
4501 in_p = 0, low = low0, high = high1;
4504 *pin_p = in_p, *plow = low, *phigh = high;
4509 /* Subroutine of fold, looking inside expressions of the form
4510 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4511 of the COND_EXPR. This function is being used also to optimize
4512 A op B ? C : A, by reversing the comparison first.
4514 Return a folded expression whose code is not a COND_EXPR
4515 anymore, or NULL_TREE if no folding opportunity is found. */
4518 fold_cond_expr_with_comparison (location_t loc, tree type,
4519 tree arg0, tree arg1, tree arg2)
4521 enum tree_code comp_code = TREE_CODE (arg0);
4522 tree arg00 = TREE_OPERAND (arg0, 0);
4523 tree arg01 = TREE_OPERAND (arg0, 1);
4524 tree arg1_type = TREE_TYPE (arg1);
4530 /* If we have A op 0 ? A : -A, consider applying the following
4533 A == 0? A : -A same as -A
4534 A != 0? A : -A same as A
4535 A >= 0? A : -A same as abs (A)
4536 A > 0? A : -A same as abs (A)
4537 A <= 0? A : -A same as -abs (A)
4538 A < 0? A : -A same as -abs (A)
4540 None of these transformations work for modes with signed
4541 zeros. If A is +/-0, the first two transformations will
4542 change the sign of the result (from +0 to -0, or vice
4543 versa). The last four will fix the sign of the result,
4544 even though the original expressions could be positive or
4545 negative, depending on the sign of A.
4547 Note that all these transformations are correct if A is
4548 NaN, since the two alternatives (A and -A) are also NaNs. */
4549 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4550 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4551 ? real_zerop (arg01)
4552 : integer_zerop (arg01))
4553 && ((TREE_CODE (arg2) == NEGATE_EXPR
4554 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4555 /* In the case that A is of the form X-Y, '-A' (arg2) may
4556 have already been folded to Y-X, check for that. */
4557 || (TREE_CODE (arg1) == MINUS_EXPR
4558 && TREE_CODE (arg2) == MINUS_EXPR
4559 && operand_equal_p (TREE_OPERAND (arg1, 0),
4560 TREE_OPERAND (arg2, 1), 0)
4561 && operand_equal_p (TREE_OPERAND (arg1, 1),
4562 TREE_OPERAND (arg2, 0), 0))))
4567 tem = fold_convert_loc (loc, arg1_type, arg1);
4568 return pedantic_non_lvalue_loc (loc,
4569 fold_convert_loc (loc, type,
4570 negate_expr (tem)));
4573 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4576 if (flag_trapping_math)
4581 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4582 arg1 = fold_convert_loc (loc, signed_type_for
4583 (TREE_TYPE (arg1)), arg1);
4584 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4585 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4588 if (flag_trapping_math)
4592 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4593 arg1 = fold_convert_loc (loc, signed_type_for
4594 (TREE_TYPE (arg1)), arg1);
4595 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4596 return negate_expr (fold_convert_loc (loc, type, tem));
4598 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4602 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4603 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4604 both transformations are correct when A is NaN: A != 0
4605 is then true, and A == 0 is false. */
4607 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4608 && integer_zerop (arg01) && integer_zerop (arg2))
4610 if (comp_code == NE_EXPR)
4611 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4612 else if (comp_code == EQ_EXPR)
4613 return build_int_cst (type, 0);
4616 /* Try some transformations of A op B ? A : B.
4618 A == B? A : B same as B
4619 A != B? A : B same as A
4620 A >= B? A : B same as max (A, B)
4621 A > B? A : B same as max (B, A)
4622 A <= B? A : B same as min (A, B)
4623 A < B? A : B same as min (B, A)
4625 As above, these transformations don't work in the presence
4626 of signed zeros. For example, if A and B are zeros of
4627 opposite sign, the first two transformations will change
4628 the sign of the result. In the last four, the original
4629 expressions give different results for (A=+0, B=-0) and
4630 (A=-0, B=+0), but the transformed expressions do not.
4632 The first two transformations are correct if either A or B
4633 is a NaN. In the first transformation, the condition will
4634 be false, and B will indeed be chosen. In the case of the
4635 second transformation, the condition A != B will be true,
4636 and A will be chosen.
4638 The conversions to max() and min() are not correct if B is
4639 a number and A is not. The conditions in the original
4640 expressions will be false, so all four give B. The min()
4641 and max() versions would give a NaN instead. */
4642 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4643 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4644 /* Avoid these transformations if the COND_EXPR may be used
4645 as an lvalue in the C++ front-end. PR c++/19199. */
4647 || (strcmp (lang_hooks.name, "GNU C++") != 0
4648 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4649 || ! maybe_lvalue_p (arg1)
4650 || ! maybe_lvalue_p (arg2)))
4652 tree comp_op0 = arg00;
4653 tree comp_op1 = arg01;
4654 tree comp_type = TREE_TYPE (comp_op0);
4656 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4657 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4667 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4669 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4674 /* In C++ a ?: expression can be an lvalue, so put the
4675 operand which will be used if they are equal first
4676 so that we can convert this back to the
4677 corresponding COND_EXPR. */
4678 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4680 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4681 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4682 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4683 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4684 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4685 comp_op1, comp_op0);
4686 return pedantic_non_lvalue_loc (loc,
4687 fold_convert_loc (loc, type, tem));
4694 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4696 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4697 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4698 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4699 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4700 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4701 comp_op1, comp_op0);
4702 return pedantic_non_lvalue_loc (loc,
4703 fold_convert_loc (loc, type, tem));
4707 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4708 return pedantic_non_lvalue_loc (loc,
4709 fold_convert_loc (loc, type, arg2));
4712 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4713 return pedantic_non_lvalue_loc (loc,
4714 fold_convert_loc (loc, type, arg1));
4717 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4722 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4723 we might still be able to simplify this. For example,
4724 if C1 is one less or one more than C2, this might have started
4725 out as a MIN or MAX and been transformed by this function.
4726 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4728 if (INTEGRAL_TYPE_P (type)
4729 && TREE_CODE (arg01) == INTEGER_CST
4730 && TREE_CODE (arg2) == INTEGER_CST)
4734 if (TREE_CODE (arg1) == INTEGER_CST)
4736 /* We can replace A with C1 in this case. */
4737 arg1 = fold_convert_loc (loc, type, arg01);
4738 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4741 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4742 MIN_EXPR, to preserve the signedness of the comparison. */
4743 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4745 && operand_equal_p (arg01,
4746 const_binop (PLUS_EXPR, arg2,
4747 build_int_cst (type, 1)),
4750 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4751 fold_convert_loc (loc, TREE_TYPE (arg00),
4753 return pedantic_non_lvalue_loc (loc,
4754 fold_convert_loc (loc, type, tem));
4759 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4761 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4763 && operand_equal_p (arg01,
4764 const_binop (MINUS_EXPR, arg2,
4765 build_int_cst (type, 1)),
4768 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4769 fold_convert_loc (loc, TREE_TYPE (arg00),
4771 return pedantic_non_lvalue_loc (loc,
4772 fold_convert_loc (loc, type, tem));
4777 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4778 MAX_EXPR, to preserve the signedness of the comparison. */
4779 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4781 && operand_equal_p (arg01,
4782 const_binop (MINUS_EXPR, arg2,
4783 build_int_cst (type, 1)),
4786 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4787 fold_convert_loc (loc, TREE_TYPE (arg00),
4789 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4794 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4795 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4797 && operand_equal_p (arg01,
4798 const_binop (PLUS_EXPR, arg2,
4799 build_int_cst (type, 1)),
4802 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4803 fold_convert_loc (loc, TREE_TYPE (arg00),
4805 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4819 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4820 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4821 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4825 /* EXP is some logical combination of boolean tests. See if we can
4826 merge it into some range test. Return the new tree if so. */
4829 fold_range_test (location_t loc, enum tree_code code, tree type,
4832 int or_op = (code == TRUTH_ORIF_EXPR
4833 || code == TRUTH_OR_EXPR);
4834 int in0_p, in1_p, in_p;
4835 tree low0, low1, low, high0, high1, high;
4836 bool strict_overflow_p = false;
4837 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4838 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4840 const char * const warnmsg = G_("assuming signed overflow does not occur "
4841 "when simplifying range test");
4843 /* If this is an OR operation, invert both sides; we will invert
4844 again at the end. */
4846 in0_p = ! in0_p, in1_p = ! in1_p;
4848 /* If both expressions are the same, if we can merge the ranges, and we
4849 can build the range test, return it or it inverted. If one of the
4850 ranges is always true or always false, consider it to be the same
4851 expression as the other. */
4852 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4853 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4855 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
4857 : rhs != 0 ? rhs : integer_zero_node,
4860 if (strict_overflow_p)
4861 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4862 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4865 /* On machines where the branch cost is expensive, if this is a
4866 short-circuited branch and the underlying object on both sides
4867 is the same, make a non-short-circuit operation. */
4868 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4869 && lhs != 0 && rhs != 0
4870 && (code == TRUTH_ANDIF_EXPR
4871 || code == TRUTH_ORIF_EXPR)
4872 && operand_equal_p (lhs, rhs, 0))
4874 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4875 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4876 which cases we can't do this. */
4877 if (simple_operand_p (lhs))
4878 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4879 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4882 else if (lang_hooks.decls.global_bindings_p () == 0
4883 && ! CONTAINS_PLACEHOLDER_P (lhs))
4885 tree common = save_expr (lhs);
4887 if (0 != (lhs = build_range_check (loc, type, common,
4888 or_op ? ! in0_p : in0_p,
4890 && (0 != (rhs = build_range_check (loc, type, common,
4891 or_op ? ! in1_p : in1_p,
4894 if (strict_overflow_p)
4895 fold_overflow_warning (warnmsg,
4896 WARN_STRICT_OVERFLOW_COMPARISON);
4897 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4898 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4907 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4908 bit value. Arrange things so the extra bits will be set to zero if and
4909 only if C is signed-extended to its full width. If MASK is nonzero,
4910 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4913 unextend (tree c, int p, int unsignedp, tree mask)
4915 tree type = TREE_TYPE (c);
4916 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4919 if (p == modesize || unsignedp)
4922 /* We work by getting just the sign bit into the low-order bit, then
4923 into the high-order bit, then sign-extend. We then XOR that value
4925 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4926 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4928 /* We must use a signed type in order to get an arithmetic right shift.
4929 However, we must also avoid introducing accidental overflows, so that
4930 a subsequent call to integer_zerop will work. Hence we must
4931 do the type conversion here. At this point, the constant is either
4932 zero or one, and the conversion to a signed type can never overflow.
4933 We could get an overflow if this conversion is done anywhere else. */
4934 if (TYPE_UNSIGNED (type))
4935 temp = fold_convert (signed_type_for (type), temp);
4937 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4938 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4940 temp = const_binop (BIT_AND_EXPR, temp,
4941 fold_convert (TREE_TYPE (c), mask));
4942 /* If necessary, convert the type back to match the type of C. */
4943 if (TYPE_UNSIGNED (type))
4944 temp = fold_convert (type, temp);
4946 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4949 /* For an expression that has the form
4953 we can drop one of the inner expressions and simplify to
4957 LOC is the location of the resulting expression. OP is the inner
4958 logical operation; the left-hand side in the examples above, while CMPOP
4959 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4960 removing a condition that guards another, as in
4961 (A != NULL && A->...) || A == NULL
4962 which we must not transform. If RHS_ONLY is true, only eliminate the
4963 right-most operand of the inner logical operation. */
4966 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4969 tree type = TREE_TYPE (cmpop);
4970 enum tree_code code = TREE_CODE (cmpop);
4971 enum tree_code truthop_code = TREE_CODE (op);
4972 tree lhs = TREE_OPERAND (op, 0);
4973 tree rhs = TREE_OPERAND (op, 1);
4974 tree orig_lhs = lhs, orig_rhs = rhs;
4975 enum tree_code rhs_code = TREE_CODE (rhs);
4976 enum tree_code lhs_code = TREE_CODE (lhs);
4977 enum tree_code inv_code;
4979 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4982 if (TREE_CODE_CLASS (code) != tcc_comparison)
4985 if (rhs_code == truthop_code)
4987 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4988 if (newrhs != NULL_TREE)
4991 rhs_code = TREE_CODE (rhs);
4994 if (lhs_code == truthop_code && !rhs_only)
4996 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4997 if (newlhs != NULL_TREE)
5000 lhs_code = TREE_CODE (lhs);
5004 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5005 if (inv_code == rhs_code
5006 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5007 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5009 if (!rhs_only && inv_code == lhs_code
5010 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5011 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5013 if (rhs != orig_rhs || lhs != orig_lhs)
5014 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5019 /* Find ways of folding logical expressions of LHS and RHS:
5020 Try to merge two comparisons to the same innermost item.
5021 Look for range tests like "ch >= '0' && ch <= '9'".
5022 Look for combinations of simple terms on machines with expensive branches
5023 and evaluate the RHS unconditionally.
5025 For example, if we have p->a == 2 && p->b == 4 and we can make an
5026 object large enough to span both A and B, we can do this with a comparison
5027 against the object ANDed with the a mask.
5029 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5030 operations to do this with one comparison.
5032 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5033 function and the one above.
5035 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5036 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5038 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5041 We return the simplified tree or 0 if no optimization is possible. */
5044 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5047 /* If this is the "or" of two comparisons, we can do something if
5048 the comparisons are NE_EXPR. If this is the "and", we can do something
5049 if the comparisons are EQ_EXPR. I.e.,
5050 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5052 WANTED_CODE is this operation code. For single bit fields, we can
5053 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5054 comparison for one-bit fields. */
5056 enum tree_code wanted_code;
5057 enum tree_code lcode, rcode;
5058 tree ll_arg, lr_arg, rl_arg, rr_arg;
5059 tree ll_inner, lr_inner, rl_inner, rr_inner;
5060 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5061 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5062 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5063 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5064 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5065 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5066 enum machine_mode lnmode, rnmode;
5067 tree ll_mask, lr_mask, rl_mask, rr_mask;
5068 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5069 tree l_const, r_const;
5070 tree lntype, rntype, result;
5071 HOST_WIDE_INT first_bit, end_bit;
5073 tree orig_lhs = lhs, orig_rhs = rhs;
5074 enum tree_code orig_code = code;
5076 /* Start by getting the comparison codes. Fail if anything is volatile.
5077 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5078 it were surrounded with a NE_EXPR. */
5080 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5083 lcode = TREE_CODE (lhs);
5084 rcode = TREE_CODE (rhs);
5086 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5088 lhs = build2 (NE_EXPR, truth_type, lhs,
5089 build_int_cst (TREE_TYPE (lhs), 0));
5093 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5095 rhs = build2 (NE_EXPR, truth_type, rhs,
5096 build_int_cst (TREE_TYPE (rhs), 0));
5100 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5101 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5104 ll_arg = TREE_OPERAND (lhs, 0);
5105 lr_arg = TREE_OPERAND (lhs, 1);
5106 rl_arg = TREE_OPERAND (rhs, 0);
5107 rr_arg = TREE_OPERAND (rhs, 1);
5109 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5110 if (simple_operand_p (ll_arg)
5111 && simple_operand_p (lr_arg))
5113 if (operand_equal_p (ll_arg, rl_arg, 0)
5114 && operand_equal_p (lr_arg, rr_arg, 0))
5116 result = combine_comparisons (loc, code, lcode, rcode,
5117 truth_type, ll_arg, lr_arg);
5121 else if (operand_equal_p (ll_arg, rr_arg, 0)
5122 && operand_equal_p (lr_arg, rl_arg, 0))
5124 result = combine_comparisons (loc, code, lcode,
5125 swap_tree_comparison (rcode),
5126 truth_type, ll_arg, lr_arg);
5132 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5133 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5135 /* If the RHS can be evaluated unconditionally and its operands are
5136 simple, it wins to evaluate the RHS unconditionally on machines
5137 with expensive branches. In this case, this isn't a comparison
5138 that can be merged. Avoid doing this if the RHS is a floating-point
5139 comparison since those can trap. */
5141 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5143 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5144 && simple_operand_p (rl_arg)
5145 && simple_operand_p (rr_arg))
5147 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5148 if (code == TRUTH_OR_EXPR
5149 && lcode == NE_EXPR && integer_zerop (lr_arg)
5150 && rcode == NE_EXPR && integer_zerop (rr_arg)
5151 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5152 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5153 return build2_loc (loc, NE_EXPR, truth_type,
5154 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5156 build_int_cst (TREE_TYPE (ll_arg), 0));
5158 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5159 if (code == TRUTH_AND_EXPR
5160 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5161 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5162 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5163 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5164 return build2_loc (loc, EQ_EXPR, truth_type,
5165 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5167 build_int_cst (TREE_TYPE (ll_arg), 0));
5169 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5171 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5172 return build2_loc (loc, code, truth_type, lhs, rhs);
5177 /* See if the comparisons can be merged. Then get all the parameters for
5180 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5181 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5185 ll_inner = decode_field_reference (loc, ll_arg,
5186 &ll_bitsize, &ll_bitpos, &ll_mode,
5187 &ll_unsignedp, &volatilep, &ll_mask,
5189 lr_inner = decode_field_reference (loc, lr_arg,
5190 &lr_bitsize, &lr_bitpos, &lr_mode,
5191 &lr_unsignedp, &volatilep, &lr_mask,
5193 rl_inner = decode_field_reference (loc, rl_arg,
5194 &rl_bitsize, &rl_bitpos, &rl_mode,
5195 &rl_unsignedp, &volatilep, &rl_mask,
5197 rr_inner = decode_field_reference (loc, rr_arg,
5198 &rr_bitsize, &rr_bitpos, &rr_mode,
5199 &rr_unsignedp, &volatilep, &rr_mask,
5202 /* It must be true that the inner operation on the lhs of each
5203 comparison must be the same if we are to be able to do anything.
5204 Then see if we have constants. If not, the same must be true for
5206 if (volatilep || ll_inner == 0 || rl_inner == 0
5207 || ! operand_equal_p (ll_inner, rl_inner, 0))
5210 if (TREE_CODE (lr_arg) == INTEGER_CST
5211 && TREE_CODE (rr_arg) == INTEGER_CST)
5212 l_const = lr_arg, r_const = rr_arg;
5213 else if (lr_inner == 0 || rr_inner == 0
5214 || ! operand_equal_p (lr_inner, rr_inner, 0))
5217 l_const = r_const = 0;
5219 /* If either comparison code is not correct for our logical operation,
5220 fail. However, we can convert a one-bit comparison against zero into
5221 the opposite comparison against that bit being set in the field. */
5223 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5224 if (lcode != wanted_code)
5226 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5228 /* Make the left operand unsigned, since we are only interested
5229 in the value of one bit. Otherwise we are doing the wrong
5238 /* This is analogous to the code for l_const above. */
5239 if (rcode != wanted_code)
5241 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5250 /* See if we can find a mode that contains both fields being compared on
5251 the left. If we can't, fail. Otherwise, update all constants and masks
5252 to be relative to a field of that size. */
5253 first_bit = MIN (ll_bitpos, rl_bitpos);
5254 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5255 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5256 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5258 if (lnmode == VOIDmode)
5261 lnbitsize = GET_MODE_BITSIZE (lnmode);
5262 lnbitpos = first_bit & ~ (lnbitsize - 1);
5263 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5264 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5266 if (BYTES_BIG_ENDIAN)
5268 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5269 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5272 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5273 size_int (xll_bitpos));
5274 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5275 size_int (xrl_bitpos));
5279 l_const = fold_convert_loc (loc, lntype, l_const);
5280 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5281 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5282 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5283 fold_build1_loc (loc, BIT_NOT_EXPR,
5286 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5288 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5293 r_const = fold_convert_loc (loc, lntype, r_const);
5294 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5295 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5296 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5297 fold_build1_loc (loc, BIT_NOT_EXPR,
5300 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5302 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5306 /* If the right sides are not constant, do the same for it. Also,
5307 disallow this optimization if a size or signedness mismatch occurs
5308 between the left and right sides. */
5311 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5312 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5313 /* Make sure the two fields on the right
5314 correspond to the left without being swapped. */
5315 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5318 first_bit = MIN (lr_bitpos, rr_bitpos);
5319 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5320 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5321 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5323 if (rnmode == VOIDmode)
5326 rnbitsize = GET_MODE_BITSIZE (rnmode);
5327 rnbitpos = first_bit & ~ (rnbitsize - 1);
5328 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5329 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5331 if (BYTES_BIG_ENDIAN)
5333 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5334 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5337 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5339 size_int (xlr_bitpos));
5340 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5342 size_int (xrr_bitpos));
5344 /* Make a mask that corresponds to both fields being compared.
5345 Do this for both items being compared. If the operands are the
5346 same size and the bits being compared are in the same position
5347 then we can do this by masking both and comparing the masked
5349 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5350 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5351 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5353 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5354 ll_unsignedp || rl_unsignedp);
5355 if (! all_ones_mask_p (ll_mask, lnbitsize))
5356 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5358 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5359 lr_unsignedp || rr_unsignedp);
5360 if (! all_ones_mask_p (lr_mask, rnbitsize))
5361 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5363 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5366 /* There is still another way we can do something: If both pairs of
5367 fields being compared are adjacent, we may be able to make a wider
5368 field containing them both.
5370 Note that we still must mask the lhs/rhs expressions. Furthermore,
5371 the mask must be shifted to account for the shift done by
5372 make_bit_field_ref. */
5373 if ((ll_bitsize + ll_bitpos == rl_bitpos
5374 && lr_bitsize + lr_bitpos == rr_bitpos)
5375 || (ll_bitpos == rl_bitpos + rl_bitsize
5376 && lr_bitpos == rr_bitpos + rr_bitsize))
5380 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5381 ll_bitsize + rl_bitsize,
5382 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5383 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5384 lr_bitsize + rr_bitsize,
5385 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5387 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5388 size_int (MIN (xll_bitpos, xrl_bitpos)));
5389 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5390 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5392 /* Convert to the smaller type before masking out unwanted bits. */
5394 if (lntype != rntype)
5396 if (lnbitsize > rnbitsize)
5398 lhs = fold_convert_loc (loc, rntype, lhs);
5399 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5402 else if (lnbitsize < rnbitsize)
5404 rhs = fold_convert_loc (loc, lntype, rhs);
5405 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5410 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5411 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5413 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5414 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5416 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5422 /* Handle the case of comparisons with constants. If there is something in
5423 common between the masks, those bits of the constants must be the same.
5424 If not, the condition is always false. Test for this to avoid generating
5425 incorrect code below. */
5426 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5427 if (! integer_zerop (result)
5428 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5429 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5431 if (wanted_code == NE_EXPR)
5433 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5434 return constant_boolean_node (true, truth_type);
5438 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5439 return constant_boolean_node (false, truth_type);
5443 /* Construct the expression we will return. First get the component
5444 reference we will make. Unless the mask is all ones the width of
5445 that field, perform the mask operation. Then compare with the
5447 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5448 ll_unsignedp || rl_unsignedp);
5450 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5451 if (! all_ones_mask_p (ll_mask, lnbitsize))
5452 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5454 return build2_loc (loc, wanted_code, truth_type, result,
5455 const_binop (BIT_IOR_EXPR, l_const, r_const));
5458 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5462 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5466 enum tree_code op_code;
5469 int consts_equal, consts_lt;
5472 STRIP_SIGN_NOPS (arg0);
5474 op_code = TREE_CODE (arg0);
5475 minmax_const = TREE_OPERAND (arg0, 1);
5476 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5477 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5478 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5479 inner = TREE_OPERAND (arg0, 0);
5481 /* If something does not permit us to optimize, return the original tree. */
5482 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5483 || TREE_CODE (comp_const) != INTEGER_CST
5484 || TREE_OVERFLOW (comp_const)
5485 || TREE_CODE (minmax_const) != INTEGER_CST
5486 || TREE_OVERFLOW (minmax_const))
5489 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5490 and GT_EXPR, doing the rest with recursive calls using logical
5494 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5497 = optimize_minmax_comparison (loc,
5498 invert_tree_comparison (code, false),
5501 return invert_truthvalue_loc (loc, tem);
5507 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5508 optimize_minmax_comparison
5509 (loc, EQ_EXPR, type, arg0, comp_const),
5510 optimize_minmax_comparison
5511 (loc, GT_EXPR, type, arg0, comp_const));
5514 if (op_code == MAX_EXPR && consts_equal)
5515 /* MAX (X, 0) == 0 -> X <= 0 */
5516 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5518 else if (op_code == MAX_EXPR && consts_lt)
5519 /* MAX (X, 0) == 5 -> X == 5 */
5520 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5522 else if (op_code == MAX_EXPR)
5523 /* MAX (X, 0) == -1 -> false */
5524 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5526 else if (consts_equal)
5527 /* MIN (X, 0) == 0 -> X >= 0 */
5528 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5531 /* MIN (X, 0) == 5 -> false */
5532 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5535 /* MIN (X, 0) == -1 -> X == -1 */
5536 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5539 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5540 /* MAX (X, 0) > 0 -> X > 0
5541 MAX (X, 0) > 5 -> X > 5 */
5542 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5544 else if (op_code == MAX_EXPR)
5545 /* MAX (X, 0) > -1 -> true */
5546 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5548 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5549 /* MIN (X, 0) > 0 -> false
5550 MIN (X, 0) > 5 -> false */
5551 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5554 /* MIN (X, 0) > -1 -> X > -1 */
5555 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5562 /* T is an integer expression that is being multiplied, divided, or taken a
5563 modulus (CODE says which and what kind of divide or modulus) by a
5564 constant C. See if we can eliminate that operation by folding it with
5565 other operations already in T. WIDE_TYPE, if non-null, is a type that
5566 should be used for the computation if wider than our type.
5568 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5569 (X * 2) + (Y * 4). We must, however, be assured that either the original
5570 expression would not overflow or that overflow is undefined for the type
5571 in the language in question.
5573 If we return a non-null expression, it is an equivalent form of the
5574 original computation, but need not be in the original type.
5576 We set *STRICT_OVERFLOW_P to true if the return values depends on
5577 signed overflow being undefined. Otherwise we do not change
5578 *STRICT_OVERFLOW_P. */
5581 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5582 bool *strict_overflow_p)
5584 /* To avoid exponential search depth, refuse to allow recursion past
5585 three levels. Beyond that (1) it's highly unlikely that we'll find
5586 something interesting and (2) we've probably processed it before
5587 when we built the inner expression. */
5596 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5603 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5604 bool *strict_overflow_p)
5606 tree type = TREE_TYPE (t);
5607 enum tree_code tcode = TREE_CODE (t);
5608 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5609 > GET_MODE_SIZE (TYPE_MODE (type)))
5610 ? wide_type : type);
5612 int same_p = tcode == code;
5613 tree op0 = NULL_TREE, op1 = NULL_TREE;
5614 bool sub_strict_overflow_p;
5616 /* Don't deal with constants of zero here; they confuse the code below. */
5617 if (integer_zerop (c))
5620 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5621 op0 = TREE_OPERAND (t, 0);
5623 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5624 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5626 /* Note that we need not handle conditional operations here since fold
5627 already handles those cases. So just do arithmetic here. */
5631 /* For a constant, we can always simplify if we are a multiply
5632 or (for divide and modulus) if it is a multiple of our constant. */
5633 if (code == MULT_EXPR
5634 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5635 return const_binop (code, fold_convert (ctype, t),
5636 fold_convert (ctype, c));
5639 CASE_CONVERT: case NON_LVALUE_EXPR:
5640 /* If op0 is an expression ... */
5641 if ((COMPARISON_CLASS_P (op0)
5642 || UNARY_CLASS_P (op0)
5643 || BINARY_CLASS_P (op0)
5644 || VL_EXP_CLASS_P (op0)
5645 || EXPRESSION_CLASS_P (op0))
5646 /* ... and has wrapping overflow, and its type is smaller
5647 than ctype, then we cannot pass through as widening. */
5648 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5649 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5650 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5651 && (TYPE_PRECISION (ctype)
5652 > TYPE_PRECISION (TREE_TYPE (op0))))
5653 /* ... or this is a truncation (t is narrower than op0),
5654 then we cannot pass through this narrowing. */
5655 || (TYPE_PRECISION (type)
5656 < TYPE_PRECISION (TREE_TYPE (op0)))
5657 /* ... or signedness changes for division or modulus,
5658 then we cannot pass through this conversion. */
5659 || (code != MULT_EXPR
5660 && (TYPE_UNSIGNED (ctype)
5661 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5662 /* ... or has undefined overflow while the converted to
5663 type has not, we cannot do the operation in the inner type
5664 as that would introduce undefined overflow. */
5665 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5666 && !TYPE_OVERFLOW_UNDEFINED (type))))
5669 /* Pass the constant down and see if we can make a simplification. If
5670 we can, replace this expression with the inner simplification for
5671 possible later conversion to our or some other type. */
5672 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5673 && TREE_CODE (t2) == INTEGER_CST
5674 && !TREE_OVERFLOW (t2)
5675 && (0 != (t1 = extract_muldiv (op0, t2, code,
5677 ? ctype : NULL_TREE,
5678 strict_overflow_p))))
5683 /* If widening the type changes it from signed to unsigned, then we
5684 must avoid building ABS_EXPR itself as unsigned. */
5685 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5687 tree cstype = (*signed_type_for) (ctype);
5688 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5691 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5692 return fold_convert (ctype, t1);
5696 /* If the constant is negative, we cannot simplify this. */
5697 if (tree_int_cst_sgn (c) == -1)
5701 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5703 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5706 case MIN_EXPR: case MAX_EXPR:
5707 /* If widening the type changes the signedness, then we can't perform
5708 this optimization as that changes the result. */
5709 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5712 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5713 sub_strict_overflow_p = false;
5714 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5715 &sub_strict_overflow_p)) != 0
5716 && (t2 = extract_muldiv (op1, c, code, wide_type,
5717 &sub_strict_overflow_p)) != 0)
5719 if (tree_int_cst_sgn (c) < 0)
5720 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5721 if (sub_strict_overflow_p)
5722 *strict_overflow_p = true;
5723 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5724 fold_convert (ctype, t2));
5728 case LSHIFT_EXPR: case RSHIFT_EXPR:
5729 /* If the second operand is constant, this is a multiplication
5730 or floor division, by a power of two, so we can treat it that
5731 way unless the multiplier or divisor overflows. Signed
5732 left-shift overflow is implementation-defined rather than
5733 undefined in C90, so do not convert signed left shift into
5735 if (TREE_CODE (op1) == INTEGER_CST
5736 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5737 /* const_binop may not detect overflow correctly,
5738 so check for it explicitly here. */
5739 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5740 && TREE_INT_CST_HIGH (op1) == 0
5741 && 0 != (t1 = fold_convert (ctype,
5742 const_binop (LSHIFT_EXPR,
5745 && !TREE_OVERFLOW (t1))
5746 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5747 ? MULT_EXPR : FLOOR_DIV_EXPR,
5749 fold_convert (ctype, op0),
5751 c, code, wide_type, strict_overflow_p);
5754 case PLUS_EXPR: case MINUS_EXPR:
5755 /* See if we can eliminate the operation on both sides. If we can, we
5756 can return a new PLUS or MINUS. If we can't, the only remaining
5757 cases where we can do anything are if the second operand is a
5759 sub_strict_overflow_p = false;
5760 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5761 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5762 if (t1 != 0 && t2 != 0
5763 && (code == MULT_EXPR
5764 /* If not multiplication, we can only do this if both operands
5765 are divisible by c. */
5766 || (multiple_of_p (ctype, op0, c)
5767 && multiple_of_p (ctype, op1, c))))
5769 if (sub_strict_overflow_p)
5770 *strict_overflow_p = true;
5771 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5772 fold_convert (ctype, t2));
5775 /* If this was a subtraction, negate OP1 and set it to be an addition.
5776 This simplifies the logic below. */
5777 if (tcode == MINUS_EXPR)
5779 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5780 /* If OP1 was not easily negatable, the constant may be OP0. */
5781 if (TREE_CODE (op0) == INTEGER_CST)
5792 if (TREE_CODE (op1) != INTEGER_CST)
5795 /* If either OP1 or C are negative, this optimization is not safe for
5796 some of the division and remainder types while for others we need
5797 to change the code. */
5798 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5800 if (code == CEIL_DIV_EXPR)
5801 code = FLOOR_DIV_EXPR;
5802 else if (code == FLOOR_DIV_EXPR)
5803 code = CEIL_DIV_EXPR;
5804 else if (code != MULT_EXPR
5805 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5809 /* If it's a multiply or a division/modulus operation of a multiple
5810 of our constant, do the operation and verify it doesn't overflow. */
5811 if (code == MULT_EXPR
5812 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5814 op1 = const_binop (code, fold_convert (ctype, op1),
5815 fold_convert (ctype, c));
5816 /* We allow the constant to overflow with wrapping semantics. */
5818 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5824 /* If we have an unsigned type is not a sizetype, we cannot widen
5825 the operation since it will change the result if the original
5826 computation overflowed. */
5827 if (TYPE_UNSIGNED (ctype)
5828 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5832 /* If we were able to eliminate our operation from the first side,
5833 apply our operation to the second side and reform the PLUS. */
5834 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5835 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5837 /* The last case is if we are a multiply. In that case, we can
5838 apply the distributive law to commute the multiply and addition
5839 if the multiplication of the constants doesn't overflow. */
5840 if (code == MULT_EXPR)
5841 return fold_build2 (tcode, ctype,
5842 fold_build2 (code, ctype,
5843 fold_convert (ctype, op0),
5844 fold_convert (ctype, c)),
5850 /* We have a special case here if we are doing something like
5851 (C * 8) % 4 since we know that's zero. */
5852 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5853 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5854 /* If the multiplication can overflow we cannot optimize this.
5855 ??? Until we can properly mark individual operations as
5856 not overflowing we need to treat sizetype special here as
5857 stor-layout relies on this opimization to make
5858 DECL_FIELD_BIT_OFFSET always a constant. */
5859 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5860 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5861 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5862 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5863 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5865 *strict_overflow_p = true;
5866 return omit_one_operand (type, integer_zero_node, op0);
5869 /* ... fall through ... */
5871 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5872 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5873 /* If we can extract our operation from the LHS, do so and return a
5874 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5875 do something only if the second operand is a constant. */
5877 && (t1 = extract_muldiv (op0, c, code, wide_type,
5878 strict_overflow_p)) != 0)
5879 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5880 fold_convert (ctype, op1));
5881 else if (tcode == MULT_EXPR && code == MULT_EXPR
5882 && (t1 = extract_muldiv (op1, c, code, wide_type,
5883 strict_overflow_p)) != 0)
5884 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5885 fold_convert (ctype, t1));
5886 else if (TREE_CODE (op1) != INTEGER_CST)
5889 /* If these are the same operation types, we can associate them
5890 assuming no overflow. */
5892 && 0 != (t1 = int_const_binop (MULT_EXPR,
5893 fold_convert (ctype, op1),
5894 fold_convert (ctype, c), 1))
5895 && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1),
5896 (TYPE_UNSIGNED (ctype)
5897 && tcode != MULT_EXPR) ? -1 : 1,
5898 TREE_OVERFLOW (t1)))
5899 && !TREE_OVERFLOW (t1))
5900 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5902 /* If these operations "cancel" each other, we have the main
5903 optimizations of this pass, which occur when either constant is a
5904 multiple of the other, in which case we replace this with either an
5905 operation or CODE or TCODE.
5907 If we have an unsigned type that is not a sizetype, we cannot do
5908 this since it will change the result if the original computation
5910 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5911 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5912 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5913 || (tcode == MULT_EXPR
5914 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5915 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5916 && code != MULT_EXPR)))
5918 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5920 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5921 *strict_overflow_p = true;
5922 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5923 fold_convert (ctype,
5924 const_binop (TRUNC_DIV_EXPR,
5927 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5929 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5930 *strict_overflow_p = true;
5931 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5932 fold_convert (ctype,
5933 const_binop (TRUNC_DIV_EXPR,
5946 /* Return a node which has the indicated constant VALUE (either 0 or
5947 1), and is of the indicated TYPE. */
5950 constant_boolean_node (int value, tree type)
5952 if (type == integer_type_node)
5953 return value ? integer_one_node : integer_zero_node;
5954 else if (type == boolean_type_node)
5955 return value ? boolean_true_node : boolean_false_node;
5957 return build_int_cst_type (type, value);
5959 return build_int_cst (NULL_TREE, value);
5963 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5964 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5965 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5966 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5967 COND is the first argument to CODE; otherwise (as in the example
5968 given here), it is the second argument. TYPE is the type of the
5969 original expression. Return NULL_TREE if no simplification is
5973 fold_binary_op_with_conditional_arg (location_t loc,
5974 enum tree_code code,
5975 tree type, tree op0, tree op1,
5976 tree cond, tree arg, int cond_first_p)
5978 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5979 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5980 tree test, true_value, false_value;
5981 tree lhs = NULL_TREE;
5982 tree rhs = NULL_TREE;
5984 if (TREE_CODE (cond) == COND_EXPR)
5986 test = TREE_OPERAND (cond, 0);
5987 true_value = TREE_OPERAND (cond, 1);
5988 false_value = TREE_OPERAND (cond, 2);
5989 /* If this operand throws an expression, then it does not make
5990 sense to try to perform a logical or arithmetic operation
5992 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5994 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5999 tree testtype = TREE_TYPE (cond);
6001 true_value = constant_boolean_node (true, testtype);
6002 false_value = constant_boolean_node (false, testtype);
6005 /* This transformation is only worthwhile if we don't have to wrap ARG
6006 in a SAVE_EXPR and the operation can be simplified on at least one
6007 of the branches once its pushed inside the COND_EXPR. */
6008 if (!TREE_CONSTANT (arg)
6009 && (TREE_SIDE_EFFECTS (arg)
6010 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6013 arg = fold_convert_loc (loc, arg_type, arg);
6016 true_value = fold_convert_loc (loc, cond_type, true_value);
6018 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6020 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6024 false_value = fold_convert_loc (loc, cond_type, false_value);
6026 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6028 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6031 /* Check that we have simplified at least one of the branches. */
6032 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6035 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6039 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6041 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6042 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6043 ADDEND is the same as X.
6045 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6046 and finite. The problematic cases are when X is zero, and its mode
6047 has signed zeros. In the case of rounding towards -infinity,
6048 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6049 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6052 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6054 if (!real_zerop (addend))
6057 /* Don't allow the fold with -fsignaling-nans. */
6058 if (HONOR_SNANS (TYPE_MODE (type)))
6061 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6062 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6065 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6066 if (TREE_CODE (addend) == REAL_CST
6067 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6070 /* The mode has signed zeros, and we have to honor their sign.
6071 In this situation, there is only one case we can return true for.
6072 X - 0 is the same as X unless rounding towards -infinity is
6074 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6077 /* Subroutine of fold() that checks comparisons of built-in math
6078 functions against real constants.
6080 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6081 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6082 is the type of the result and ARG0 and ARG1 are the operands of the
6083 comparison. ARG1 must be a TREE_REAL_CST.
6085 The function returns the constant folded tree if a simplification
6086 can be made, and NULL_TREE otherwise. */
6089 fold_mathfn_compare (location_t loc,
6090 enum built_in_function fcode, enum tree_code code,
6091 tree type, tree arg0, tree arg1)
6095 if (BUILTIN_SQRT_P (fcode))
6097 tree arg = CALL_EXPR_ARG (arg0, 0);
6098 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6100 c = TREE_REAL_CST (arg1);
6101 if (REAL_VALUE_NEGATIVE (c))
6103 /* sqrt(x) < y is always false, if y is negative. */
6104 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6105 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6107 /* sqrt(x) > y is always true, if y is negative and we
6108 don't care about NaNs, i.e. negative values of x. */
6109 if (code == NE_EXPR || !HONOR_NANS (mode))
6110 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6112 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6113 return fold_build2_loc (loc, GE_EXPR, type, arg,
6114 build_real (TREE_TYPE (arg), dconst0));
6116 else if (code == GT_EXPR || code == GE_EXPR)
6120 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6121 real_convert (&c2, mode, &c2);
6123 if (REAL_VALUE_ISINF (c2))
6125 /* sqrt(x) > y is x == +Inf, when y is very large. */
6126 if (HONOR_INFINITIES (mode))
6127 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6128 build_real (TREE_TYPE (arg), c2));
6130 /* sqrt(x) > y is always false, when y is very large
6131 and we don't care about infinities. */
6132 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6135 /* sqrt(x) > c is the same as x > c*c. */
6136 return fold_build2_loc (loc, code, type, arg,
6137 build_real (TREE_TYPE (arg), c2));
6139 else if (code == LT_EXPR || code == LE_EXPR)
6143 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6144 real_convert (&c2, mode, &c2);
6146 if (REAL_VALUE_ISINF (c2))
6148 /* sqrt(x) < y is always true, when y is a very large
6149 value and we don't care about NaNs or Infinities. */
6150 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6151 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6153 /* sqrt(x) < y is x != +Inf when y is very large and we
6154 don't care about NaNs. */
6155 if (! HONOR_NANS (mode))
6156 return fold_build2_loc (loc, NE_EXPR, type, arg,
6157 build_real (TREE_TYPE (arg), c2));
6159 /* sqrt(x) < y is x >= 0 when y is very large and we
6160 don't care about Infinities. */
6161 if (! HONOR_INFINITIES (mode))
6162 return fold_build2_loc (loc, GE_EXPR, type, arg,
6163 build_real (TREE_TYPE (arg), dconst0));
6165 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6166 if (lang_hooks.decls.global_bindings_p () != 0
6167 || CONTAINS_PLACEHOLDER_P (arg))
6170 arg = save_expr (arg);
6171 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6172 fold_build2_loc (loc, GE_EXPR, type, arg,
6173 build_real (TREE_TYPE (arg),
6175 fold_build2_loc (loc, NE_EXPR, type, arg,
6176 build_real (TREE_TYPE (arg),
6180 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6181 if (! HONOR_NANS (mode))
6182 return fold_build2_loc (loc, code, type, arg,
6183 build_real (TREE_TYPE (arg), c2));
6185 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6186 if (lang_hooks.decls.global_bindings_p () == 0
6187 && ! CONTAINS_PLACEHOLDER_P (arg))
6189 arg = save_expr (arg);
6190 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6191 fold_build2_loc (loc, GE_EXPR, type, arg,
6192 build_real (TREE_TYPE (arg),
6194 fold_build2_loc (loc, code, type, arg,
6195 build_real (TREE_TYPE (arg),
6204 /* Subroutine of fold() that optimizes comparisons against Infinities,
6205 either +Inf or -Inf.
6207 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6208 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6209 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6211 The function returns the constant folded tree if a simplification
6212 can be made, and NULL_TREE otherwise. */
6215 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6216 tree arg0, tree arg1)
6218 enum machine_mode mode;
6219 REAL_VALUE_TYPE max;
6223 mode = TYPE_MODE (TREE_TYPE (arg0));
6225 /* For negative infinity swap the sense of the comparison. */
6226 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6228 code = swap_tree_comparison (code);
6233 /* x > +Inf is always false, if with ignore sNANs. */
6234 if (HONOR_SNANS (mode))
6236 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6239 /* x <= +Inf is always true, if we don't case about NaNs. */
6240 if (! HONOR_NANS (mode))
6241 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6243 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6244 if (lang_hooks.decls.global_bindings_p () == 0
6245 && ! CONTAINS_PLACEHOLDER_P (arg0))
6247 arg0 = save_expr (arg0);
6248 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6254 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6255 real_maxval (&max, neg, mode);
6256 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6257 arg0, build_real (TREE_TYPE (arg0), max));
6260 /* x < +Inf is always equal to x <= DBL_MAX. */
6261 real_maxval (&max, neg, mode);
6262 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6263 arg0, build_real (TREE_TYPE (arg0), max));
6266 /* x != +Inf is always equal to !(x > DBL_MAX). */
6267 real_maxval (&max, neg, mode);
6268 if (! HONOR_NANS (mode))
6269 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6270 arg0, build_real (TREE_TYPE (arg0), max));
6272 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6273 arg0, build_real (TREE_TYPE (arg0), max));
6274 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6283 /* Subroutine of fold() that optimizes comparisons of a division by
6284 a nonzero integer constant against an integer constant, i.e.
6287 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6288 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6289 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6291 The function returns the constant folded tree if a simplification
6292 can be made, and NULL_TREE otherwise. */
6295 fold_div_compare (location_t loc,
6296 enum tree_code code, tree type, tree arg0, tree arg1)
6298 tree prod, tmp, hi, lo;
6299 tree arg00 = TREE_OPERAND (arg0, 0);
6300 tree arg01 = TREE_OPERAND (arg0, 1);
6302 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6306 /* We have to do this the hard way to detect unsigned overflow.
6307 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6308 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6309 TREE_INT_CST_HIGH (arg01),
6310 TREE_INT_CST_LOW (arg1),
6311 TREE_INT_CST_HIGH (arg1),
6312 &val.low, &val.high, unsigned_p);
6313 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6314 neg_overflow = false;
6318 tmp = int_const_binop (MINUS_EXPR, arg01,
6319 build_int_cst (TREE_TYPE (arg01), 1), 0);
6322 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6323 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6324 TREE_INT_CST_HIGH (prod),
6325 TREE_INT_CST_LOW (tmp),
6326 TREE_INT_CST_HIGH (tmp),
6327 &val.low, &val.high, unsigned_p);
6328 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6329 -1, overflow | TREE_OVERFLOW (prod));
6331 else if (tree_int_cst_sgn (arg01) >= 0)
6333 tmp = int_const_binop (MINUS_EXPR, arg01,
6334 build_int_cst (TREE_TYPE (arg01), 1), 0);
6335 switch (tree_int_cst_sgn (arg1))
6338 neg_overflow = true;
6339 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6344 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6349 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6359 /* A negative divisor reverses the relational operators. */
6360 code = swap_tree_comparison (code);
6362 tmp = int_const_binop (PLUS_EXPR, arg01,
6363 build_int_cst (TREE_TYPE (arg01), 1), 0);
6364 switch (tree_int_cst_sgn (arg1))
6367 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6372 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6377 neg_overflow = true;
6378 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6390 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6391 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6392 if (TREE_OVERFLOW (hi))
6393 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6394 if (TREE_OVERFLOW (lo))
6395 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6396 return build_range_check (loc, type, arg00, 1, lo, hi);
6399 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6400 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6401 if (TREE_OVERFLOW (hi))
6402 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6403 if (TREE_OVERFLOW (lo))
6404 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6405 return build_range_check (loc, type, arg00, 0, lo, hi);
6408 if (TREE_OVERFLOW (lo))
6410 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6411 return omit_one_operand_loc (loc, type, tmp, arg00);
6413 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6416 if (TREE_OVERFLOW (hi))
6418 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6419 return omit_one_operand_loc (loc, type, tmp, arg00);
6421 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6424 if (TREE_OVERFLOW (hi))
6426 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6427 return omit_one_operand_loc (loc, type, tmp, arg00);
6429 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6432 if (TREE_OVERFLOW (lo))
6434 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6435 return omit_one_operand_loc (loc, type, tmp, arg00);
6437 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6447 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6448 equality/inequality test, then return a simplified form of the test
6449 using a sign testing. Otherwise return NULL. TYPE is the desired
6453 fold_single_bit_test_into_sign_test (location_t loc,
6454 enum tree_code code, tree arg0, tree arg1,
6457 /* If this is testing a single bit, we can optimize the test. */
6458 if ((code == NE_EXPR || code == EQ_EXPR)
6459 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6460 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6462 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6463 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6464 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6466 if (arg00 != NULL_TREE
6467 /* This is only a win if casting to a signed type is cheap,
6468 i.e. when arg00's type is not a partial mode. */
6469 && TYPE_PRECISION (TREE_TYPE (arg00))
6470 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6472 tree stype = signed_type_for (TREE_TYPE (arg00));
6473 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6475 fold_convert_loc (loc, stype, arg00),
6476 build_int_cst (stype, 0));
6483 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6484 equality/inequality test, then return a simplified form of
6485 the test using shifts and logical operations. Otherwise return
6486 NULL. TYPE is the desired result type. */
6489 fold_single_bit_test (location_t loc, enum tree_code code,
6490 tree arg0, tree arg1, tree result_type)
6492 /* If this is testing a single bit, we can optimize the test. */
6493 if ((code == NE_EXPR || code == EQ_EXPR)
6494 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6495 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6497 tree inner = TREE_OPERAND (arg0, 0);
6498 tree type = TREE_TYPE (arg0);
6499 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6500 enum machine_mode operand_mode = TYPE_MODE (type);
6502 tree signed_type, unsigned_type, intermediate_type;
6505 /* First, see if we can fold the single bit test into a sign-bit
6507 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6512 /* Otherwise we have (A & C) != 0 where C is a single bit,
6513 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6514 Similarly for (A & C) == 0. */
6516 /* If INNER is a right shift of a constant and it plus BITNUM does
6517 not overflow, adjust BITNUM and INNER. */
6518 if (TREE_CODE (inner) == RSHIFT_EXPR
6519 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6520 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6521 && bitnum < TYPE_PRECISION (type)
6522 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6523 bitnum - TYPE_PRECISION (type)))
6525 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6526 inner = TREE_OPERAND (inner, 0);
6529 /* If we are going to be able to omit the AND below, we must do our
6530 operations as unsigned. If we must use the AND, we have a choice.
6531 Normally unsigned is faster, but for some machines signed is. */
6532 #ifdef LOAD_EXTEND_OP
6533 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6534 && !flag_syntax_only) ? 0 : 1;
6539 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6540 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6541 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6542 inner = fold_convert_loc (loc, intermediate_type, inner);
6545 inner = build2 (RSHIFT_EXPR, intermediate_type,
6546 inner, size_int (bitnum));
6548 one = build_int_cst (intermediate_type, 1);
6550 if (code == EQ_EXPR)
6551 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6553 /* Put the AND last so it can combine with more things. */
6554 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6556 /* Make sure to return the proper type. */
6557 inner = fold_convert_loc (loc, result_type, inner);
6564 /* Check whether we are allowed to reorder operands arg0 and arg1,
6565 such that the evaluation of arg1 occurs before arg0. */
6568 reorder_operands_p (const_tree arg0, const_tree arg1)
6570 if (! flag_evaluation_order)
6572 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6574 return ! TREE_SIDE_EFFECTS (arg0)
6575 && ! TREE_SIDE_EFFECTS (arg1);
6578 /* Test whether it is preferable two swap two operands, ARG0 and
6579 ARG1, for example because ARG0 is an integer constant and ARG1
6580 isn't. If REORDER is true, only recommend swapping if we can
6581 evaluate the operands in reverse order. */
6584 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6586 STRIP_SIGN_NOPS (arg0);
6587 STRIP_SIGN_NOPS (arg1);
6589 if (TREE_CODE (arg1) == INTEGER_CST)
6591 if (TREE_CODE (arg0) == INTEGER_CST)
6594 if (TREE_CODE (arg1) == REAL_CST)
6596 if (TREE_CODE (arg0) == REAL_CST)
6599 if (TREE_CODE (arg1) == FIXED_CST)
6601 if (TREE_CODE (arg0) == FIXED_CST)
6604 if (TREE_CODE (arg1) == COMPLEX_CST)
6606 if (TREE_CODE (arg0) == COMPLEX_CST)
6609 if (TREE_CONSTANT (arg1))
6611 if (TREE_CONSTANT (arg0))
6614 if (optimize_function_for_size_p (cfun))
6617 if (reorder && flag_evaluation_order
6618 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6621 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6622 for commutative and comparison operators. Ensuring a canonical
6623 form allows the optimizers to find additional redundancies without
6624 having to explicitly check for both orderings. */
6625 if (TREE_CODE (arg0) == SSA_NAME
6626 && TREE_CODE (arg1) == SSA_NAME
6627 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6630 /* Put SSA_NAMEs last. */
6631 if (TREE_CODE (arg1) == SSA_NAME)
6633 if (TREE_CODE (arg0) == SSA_NAME)
6636 /* Put variables last. */
6645 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6646 ARG0 is extended to a wider type. */
6649 fold_widened_comparison (location_t loc, enum tree_code code,
6650 tree type, tree arg0, tree arg1)
6652 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6654 tree shorter_type, outer_type;
6658 if (arg0_unw == arg0)
6660 shorter_type = TREE_TYPE (arg0_unw);
6662 #ifdef HAVE_canonicalize_funcptr_for_compare
6663 /* Disable this optimization if we're casting a function pointer
6664 type on targets that require function pointer canonicalization. */
6665 if (HAVE_canonicalize_funcptr_for_compare
6666 && TREE_CODE (shorter_type) == POINTER_TYPE
6667 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6671 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6674 arg1_unw = get_unwidened (arg1, NULL_TREE);
6676 /* If possible, express the comparison in the shorter mode. */
6677 if ((code == EQ_EXPR || code == NE_EXPR
6678 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6679 && (TREE_TYPE (arg1_unw) == shorter_type
6680 || ((TYPE_PRECISION (shorter_type)
6681 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6682 && (TYPE_UNSIGNED (shorter_type)
6683 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6684 || (TREE_CODE (arg1_unw) == INTEGER_CST
6685 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6686 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6687 && int_fits_type_p (arg1_unw, shorter_type))))
6688 return fold_build2_loc (loc, code, type, arg0_unw,
6689 fold_convert_loc (loc, shorter_type, arg1_unw));
6691 if (TREE_CODE (arg1_unw) != INTEGER_CST
6692 || TREE_CODE (shorter_type) != INTEGER_TYPE
6693 || !int_fits_type_p (arg1_unw, shorter_type))
6696 /* If we are comparing with the integer that does not fit into the range
6697 of the shorter type, the result is known. */
6698 outer_type = TREE_TYPE (arg1_unw);
6699 min = lower_bound_in_type (outer_type, shorter_type);
6700 max = upper_bound_in_type (outer_type, shorter_type);
6702 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6704 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6711 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6716 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6722 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6724 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6729 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6731 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6740 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6741 ARG0 just the signedness is changed. */
6744 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6745 tree arg0, tree arg1)
6748 tree inner_type, outer_type;
6750 if (!CONVERT_EXPR_P (arg0))
6753 outer_type = TREE_TYPE (arg0);
6754 arg0_inner = TREE_OPERAND (arg0, 0);
6755 inner_type = TREE_TYPE (arg0_inner);
6757 #ifdef HAVE_canonicalize_funcptr_for_compare
6758 /* Disable this optimization if we're casting a function pointer
6759 type on targets that require function pointer canonicalization. */
6760 if (HAVE_canonicalize_funcptr_for_compare
6761 && TREE_CODE (inner_type) == POINTER_TYPE
6762 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6766 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6769 if (TREE_CODE (arg1) != INTEGER_CST
6770 && !(CONVERT_EXPR_P (arg1)
6771 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6774 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6775 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6780 if (TREE_CODE (arg1) == INTEGER_CST)
6781 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6782 0, TREE_OVERFLOW (arg1));
6784 arg1 = fold_convert_loc (loc, inner_type, arg1);
6786 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6789 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6790 step of the array. Reconstructs s and delta in the case of s *
6791 delta being an integer constant (and thus already folded). ADDR is
6792 the address. MULT is the multiplicative expression. If the
6793 function succeeds, the new address expression is returned.
6794 Otherwise NULL_TREE is returned. LOC is the location of the
6795 resulting expression. */
6798 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6800 tree s, delta, step;
6801 tree ref = TREE_OPERAND (addr, 0), pref;
6806 /* Strip the nops that might be added when converting op1 to sizetype. */
6809 /* Canonicalize op1 into a possibly non-constant delta
6810 and an INTEGER_CST s. */
6811 if (TREE_CODE (op1) == MULT_EXPR)
6813 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6818 if (TREE_CODE (arg0) == INTEGER_CST)
6823 else if (TREE_CODE (arg1) == INTEGER_CST)
6831 else if (TREE_CODE (op1) == INTEGER_CST)
6838 /* Simulate we are delta * 1. */
6840 s = integer_one_node;
6843 for (;; ref = TREE_OPERAND (ref, 0))
6845 if (TREE_CODE (ref) == ARRAY_REF)
6849 /* Remember if this was a multi-dimensional array. */
6850 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6853 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6856 itype = TREE_TYPE (domain);
6858 step = array_ref_element_size (ref);
6859 if (TREE_CODE (step) != INTEGER_CST)
6864 if (! tree_int_cst_equal (step, s))
6869 /* Try if delta is a multiple of step. */
6870 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6876 /* Only fold here if we can verify we do not overflow one
6877 dimension of a multi-dimensional array. */
6882 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6883 || !TYPE_MAX_VALUE (domain)
6884 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6887 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6888 fold_convert_loc (loc, itype,
6889 TREE_OPERAND (ref, 1)),
6890 fold_convert_loc (loc, itype, delta));
6892 || TREE_CODE (tmp) != INTEGER_CST
6893 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6902 if (!handled_component_p (ref))
6906 /* We found the suitable array reference. So copy everything up to it,
6907 and replace the index. */
6909 pref = TREE_OPERAND (addr, 0);
6910 ret = copy_node (pref);
6911 SET_EXPR_LOCATION (ret, loc);
6916 pref = TREE_OPERAND (pref, 0);
6917 TREE_OPERAND (pos, 0) = copy_node (pref);
6918 pos = TREE_OPERAND (pos, 0);
6921 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6922 fold_convert_loc (loc, itype,
6923 TREE_OPERAND (pos, 1)),
6924 fold_convert_loc (loc, itype, delta));
6926 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6930 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6931 means A >= Y && A != MAX, but in this case we know that
6932 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6935 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6937 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6939 if (TREE_CODE (bound) == LT_EXPR)
6940 a = TREE_OPERAND (bound, 0);
6941 else if (TREE_CODE (bound) == GT_EXPR)
6942 a = TREE_OPERAND (bound, 1);
6946 typea = TREE_TYPE (a);
6947 if (!INTEGRAL_TYPE_P (typea)
6948 && !POINTER_TYPE_P (typea))
6951 if (TREE_CODE (ineq) == LT_EXPR)
6953 a1 = TREE_OPERAND (ineq, 1);
6954 y = TREE_OPERAND (ineq, 0);
6956 else if (TREE_CODE (ineq) == GT_EXPR)
6958 a1 = TREE_OPERAND (ineq, 0);
6959 y = TREE_OPERAND (ineq, 1);
6964 if (TREE_TYPE (a1) != typea)
6967 if (POINTER_TYPE_P (typea))
6969 /* Convert the pointer types into integer before taking the difference. */
6970 tree ta = fold_convert_loc (loc, ssizetype, a);
6971 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6972 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6975 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6977 if (!diff || !integer_onep (diff))
6980 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6983 /* Fold a sum or difference of at least one multiplication.
6984 Returns the folded tree or NULL if no simplification could be made. */
6987 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6988 tree arg0, tree arg1)
6990 tree arg00, arg01, arg10, arg11;
6991 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6993 /* (A * C) +- (B * C) -> (A+-B) * C.
6994 (A * C) +- A -> A * (C+-1).
6995 We are most concerned about the case where C is a constant,
6996 but other combinations show up during loop reduction. Since
6997 it is not difficult, try all four possibilities. */
6999 if (TREE_CODE (arg0) == MULT_EXPR)
7001 arg00 = TREE_OPERAND (arg0, 0);
7002 arg01 = TREE_OPERAND (arg0, 1);
7004 else if (TREE_CODE (arg0) == INTEGER_CST)
7006 arg00 = build_one_cst (type);
7011 /* We cannot generate constant 1 for fract. */
7012 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7015 arg01 = build_one_cst (type);
7017 if (TREE_CODE (arg1) == MULT_EXPR)
7019 arg10 = TREE_OPERAND (arg1, 0);
7020 arg11 = TREE_OPERAND (arg1, 1);
7022 else if (TREE_CODE (arg1) == INTEGER_CST)
7024 arg10 = build_one_cst (type);
7025 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7026 the purpose of this canonicalization. */
7027 if (TREE_INT_CST_HIGH (arg1) == -1
7028 && negate_expr_p (arg1)
7029 && code == PLUS_EXPR)
7031 arg11 = negate_expr (arg1);
7039 /* We cannot generate constant 1 for fract. */
7040 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7043 arg11 = build_one_cst (type);
7047 if (operand_equal_p (arg01, arg11, 0))
7048 same = arg01, alt0 = arg00, alt1 = arg10;
7049 else if (operand_equal_p (arg00, arg10, 0))
7050 same = arg00, alt0 = arg01, alt1 = arg11;
7051 else if (operand_equal_p (arg00, arg11, 0))
7052 same = arg00, alt0 = arg01, alt1 = arg10;
7053 else if (operand_equal_p (arg01, arg10, 0))
7054 same = arg01, alt0 = arg00, alt1 = arg11;
7056 /* No identical multiplicands; see if we can find a common
7057 power-of-two factor in non-power-of-two multiplies. This
7058 can help in multi-dimensional array access. */
7059 else if (host_integerp (arg01, 0)
7060 && host_integerp (arg11, 0))
7062 HOST_WIDE_INT int01, int11, tmp;
7065 int01 = TREE_INT_CST_LOW (arg01);
7066 int11 = TREE_INT_CST_LOW (arg11);
7068 /* Move min of absolute values to int11. */
7069 if ((int01 >= 0 ? int01 : -int01)
7070 < (int11 >= 0 ? int11 : -int11))
7072 tmp = int01, int01 = int11, int11 = tmp;
7073 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7080 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7081 /* The remainder should not be a constant, otherwise we
7082 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7083 increased the number of multiplications necessary. */
7084 && TREE_CODE (arg10) != INTEGER_CST)
7086 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7087 build_int_cst (TREE_TYPE (arg00),
7092 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7097 return fold_build2_loc (loc, MULT_EXPR, type,
7098 fold_build2_loc (loc, code, type,
7099 fold_convert_loc (loc, type, alt0),
7100 fold_convert_loc (loc, type, alt1)),
7101 fold_convert_loc (loc, type, same));
7106 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7107 specified by EXPR into the buffer PTR of length LEN bytes.
7108 Return the number of bytes placed in the buffer, or zero
7112 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7114 tree type = TREE_TYPE (expr);
7115 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7116 int byte, offset, word, words;
7117 unsigned char value;
7119 if (total_bytes > len)
7121 words = total_bytes / UNITS_PER_WORD;
7123 for (byte = 0; byte < total_bytes; byte++)
7125 int bitpos = byte * BITS_PER_UNIT;
7126 if (bitpos < HOST_BITS_PER_WIDE_INT)
7127 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7129 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7130 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7132 if (total_bytes > UNITS_PER_WORD)
7134 word = byte / UNITS_PER_WORD;
7135 if (WORDS_BIG_ENDIAN)
7136 word = (words - 1) - word;
7137 offset = word * UNITS_PER_WORD;
7138 if (BYTES_BIG_ENDIAN)
7139 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7141 offset += byte % UNITS_PER_WORD;
7144 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7145 ptr[offset] = value;
7151 /* Subroutine of native_encode_expr. Encode the REAL_CST
7152 specified by EXPR into the buffer PTR of length LEN bytes.
7153 Return the number of bytes placed in the buffer, or zero
7157 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7159 tree type = TREE_TYPE (expr);
7160 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7161 int byte, offset, word, words, bitpos;
7162 unsigned char value;
7164 /* There are always 32 bits in each long, no matter the size of
7165 the hosts long. We handle floating point representations with
7169 if (total_bytes > len)
7171 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7173 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7175 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7176 bitpos += BITS_PER_UNIT)
7178 byte = (bitpos / BITS_PER_UNIT) & 3;
7179 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7181 if (UNITS_PER_WORD < 4)
7183 word = byte / UNITS_PER_WORD;
7184 if (WORDS_BIG_ENDIAN)
7185 word = (words - 1) - word;
7186 offset = word * UNITS_PER_WORD;
7187 if (BYTES_BIG_ENDIAN)
7188 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7190 offset += byte % UNITS_PER_WORD;
7193 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7194 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7199 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7200 specified by EXPR into the buffer PTR of length LEN bytes.
7201 Return the number of bytes placed in the buffer, or zero
7205 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7210 part = TREE_REALPART (expr);
7211 rsize = native_encode_expr (part, ptr, len);
7214 part = TREE_IMAGPART (expr);
7215 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7218 return rsize + isize;
7222 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7223 specified by EXPR into the buffer PTR of length LEN bytes.
7224 Return the number of bytes placed in the buffer, or zero
7228 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7230 int i, size, offset, count;
7231 tree itype, elem, elements;
7234 elements = TREE_VECTOR_CST_ELTS (expr);
7235 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7236 itype = TREE_TYPE (TREE_TYPE (expr));
7237 size = GET_MODE_SIZE (TYPE_MODE (itype));
7238 for (i = 0; i < count; i++)
7242 elem = TREE_VALUE (elements);
7243 elements = TREE_CHAIN (elements);
7250 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7255 if (offset + size > len)
7257 memset (ptr+offset, 0, size);
7265 /* Subroutine of native_encode_expr. Encode the STRING_CST
7266 specified by EXPR into the buffer PTR of length LEN bytes.
7267 Return the number of bytes placed in the buffer, or zero
7271 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7273 tree type = TREE_TYPE (expr);
7274 HOST_WIDE_INT total_bytes;
7276 if (TREE_CODE (type) != ARRAY_TYPE
7277 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7278 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7279 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7281 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7282 if (total_bytes > len)
7284 if (TREE_STRING_LENGTH (expr) < total_bytes)
7286 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7287 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7288 total_bytes - TREE_STRING_LENGTH (expr));
7291 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7296 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7297 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7298 buffer PTR of length LEN bytes. Return the number of bytes
7299 placed in the buffer, or zero upon failure. */
7302 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7304 switch (TREE_CODE (expr))
7307 return native_encode_int (expr, ptr, len);
7310 return native_encode_real (expr, ptr, len);
7313 return native_encode_complex (expr, ptr, len);
7316 return native_encode_vector (expr, ptr, len);
7319 return native_encode_string (expr, ptr, len);
7327 /* Subroutine of native_interpret_expr. Interpret the contents of
7328 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7329 If the buffer cannot be interpreted, return NULL_TREE. */
7332 native_interpret_int (tree type, const unsigned char *ptr, int len)
7334 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7335 int byte, offset, word, words;
7336 unsigned char value;
7339 if (total_bytes > len)
7341 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7344 result = double_int_zero;
7345 words = total_bytes / UNITS_PER_WORD;
7347 for (byte = 0; byte < total_bytes; byte++)
7349 int bitpos = byte * BITS_PER_UNIT;
7350 if (total_bytes > UNITS_PER_WORD)
7352 word = byte / UNITS_PER_WORD;
7353 if (WORDS_BIG_ENDIAN)
7354 word = (words - 1) - word;
7355 offset = word * UNITS_PER_WORD;
7356 if (BYTES_BIG_ENDIAN)
7357 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7359 offset += byte % UNITS_PER_WORD;
7362 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7363 value = ptr[offset];
7365 if (bitpos < HOST_BITS_PER_WIDE_INT)
7366 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7368 result.high |= (unsigned HOST_WIDE_INT) value
7369 << (bitpos - HOST_BITS_PER_WIDE_INT);
7372 return double_int_to_tree (type, result);
7376 /* Subroutine of native_interpret_expr. Interpret the contents of
7377 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7378 If the buffer cannot be interpreted, return NULL_TREE. */
7381 native_interpret_real (tree type, const unsigned char *ptr, int len)
7383 enum machine_mode mode = TYPE_MODE (type);
7384 int total_bytes = GET_MODE_SIZE (mode);
7385 int byte, offset, word, words, bitpos;
7386 unsigned char value;
7387 /* There are always 32 bits in each long, no matter the size of
7388 the hosts long. We handle floating point representations with
7393 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7394 if (total_bytes > len || total_bytes > 24)
7396 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7398 memset (tmp, 0, sizeof (tmp));
7399 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7400 bitpos += BITS_PER_UNIT)
7402 byte = (bitpos / BITS_PER_UNIT) & 3;
7403 if (UNITS_PER_WORD < 4)
7405 word = byte / UNITS_PER_WORD;
7406 if (WORDS_BIG_ENDIAN)
7407 word = (words - 1) - word;
7408 offset = word * UNITS_PER_WORD;
7409 if (BYTES_BIG_ENDIAN)
7410 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7412 offset += byte % UNITS_PER_WORD;
7415 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7416 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7418 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7421 real_from_target (&r, tmp, mode);
7422 return build_real (type, r);
7426 /* Subroutine of native_interpret_expr. Interpret the contents of
7427 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7428 If the buffer cannot be interpreted, return NULL_TREE. */
7431 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7433 tree etype, rpart, ipart;
7436 etype = TREE_TYPE (type);
7437 size = GET_MODE_SIZE (TYPE_MODE (etype));
7440 rpart = native_interpret_expr (etype, ptr, size);
7443 ipart = native_interpret_expr (etype, ptr+size, size);
7446 return build_complex (type, rpart, ipart);
7450 /* Subroutine of native_interpret_expr. Interpret the contents of
7451 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7452 If the buffer cannot be interpreted, return NULL_TREE. */
7455 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7457 tree etype, elem, elements;
7460 etype = TREE_TYPE (type);
7461 size = GET_MODE_SIZE (TYPE_MODE (etype));
7462 count = TYPE_VECTOR_SUBPARTS (type);
7463 if (size * count > len)
7466 elements = NULL_TREE;
7467 for (i = count - 1; i >= 0; i--)
7469 elem = native_interpret_expr (etype, ptr+(i*size), size);
7472 elements = tree_cons (NULL_TREE, elem, elements);
7474 return build_vector (type, elements);
7478 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7479 the buffer PTR of length LEN as a constant of type TYPE. For
7480 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7481 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7482 return NULL_TREE. */
7485 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7487 switch (TREE_CODE (type))
7492 return native_interpret_int (type, ptr, len);
7495 return native_interpret_real (type, ptr, len);
7498 return native_interpret_complex (type, ptr, len);
7501 return native_interpret_vector (type, ptr, len);
7509 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7510 TYPE at compile-time. If we're unable to perform the conversion
7511 return NULL_TREE. */
7514 fold_view_convert_expr (tree type, tree expr)
7516 /* We support up to 512-bit values (for V8DFmode). */
7517 unsigned char buffer[64];
7520 /* Check that the host and target are sane. */
7521 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7524 len = native_encode_expr (expr, buffer, sizeof (buffer));
7528 return native_interpret_expr (type, buffer, len);
7531 /* Build an expression for the address of T. Folds away INDIRECT_REF
7532 to avoid confusing the gimplify process. */
7535 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7537 /* The size of the object is not relevant when talking about its address. */
7538 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7539 t = TREE_OPERAND (t, 0);
7541 if (TREE_CODE (t) == INDIRECT_REF)
7543 t = TREE_OPERAND (t, 0);
7545 if (TREE_TYPE (t) != ptrtype)
7546 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7548 else if (TREE_CODE (t) == MEM_REF
7549 && integer_zerop (TREE_OPERAND (t, 1)))
7550 return TREE_OPERAND (t, 0);
7551 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7553 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7555 if (TREE_TYPE (t) != ptrtype)
7556 t = fold_convert_loc (loc, ptrtype, t);
7559 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7564 /* Build an expression for the address of T. */
7567 build_fold_addr_expr_loc (location_t loc, tree t)
7569 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7571 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7574 /* Fold a unary expression of code CODE and type TYPE with operand
7575 OP0. Return the folded expression if folding is successful.
7576 Otherwise, return NULL_TREE. */
7579 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7583 enum tree_code_class kind = TREE_CODE_CLASS (code);
7585 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7586 && TREE_CODE_LENGTH (code) == 1);
7591 if (CONVERT_EXPR_CODE_P (code)
7592 || code == FLOAT_EXPR || code == ABS_EXPR)
7594 /* Don't use STRIP_NOPS, because signedness of argument type
7596 STRIP_SIGN_NOPS (arg0);
7600 /* Strip any conversions that don't change the mode. This
7601 is safe for every expression, except for a comparison
7602 expression because its signedness is derived from its
7605 Note that this is done as an internal manipulation within
7606 the constant folder, in order to find the simplest
7607 representation of the arguments so that their form can be
7608 studied. In any cases, the appropriate type conversions
7609 should be put back in the tree that will get out of the
7615 if (TREE_CODE_CLASS (code) == tcc_unary)
7617 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7618 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7619 fold_build1_loc (loc, code, type,
7620 fold_convert_loc (loc, TREE_TYPE (op0),
7621 TREE_OPERAND (arg0, 1))));
7622 else if (TREE_CODE (arg0) == COND_EXPR)
7624 tree arg01 = TREE_OPERAND (arg0, 1);
7625 tree arg02 = TREE_OPERAND (arg0, 2);
7626 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7627 arg01 = fold_build1_loc (loc, code, type,
7628 fold_convert_loc (loc,
7629 TREE_TYPE (op0), arg01));
7630 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7631 arg02 = fold_build1_loc (loc, code, type,
7632 fold_convert_loc (loc,
7633 TREE_TYPE (op0), arg02));
7634 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7637 /* If this was a conversion, and all we did was to move into
7638 inside the COND_EXPR, bring it back out. But leave it if
7639 it is a conversion from integer to integer and the
7640 result precision is no wider than a word since such a
7641 conversion is cheap and may be optimized away by combine,
7642 while it couldn't if it were outside the COND_EXPR. Then return
7643 so we don't get into an infinite recursion loop taking the
7644 conversion out and then back in. */
7646 if ((CONVERT_EXPR_CODE_P (code)
7647 || code == NON_LVALUE_EXPR)
7648 && TREE_CODE (tem) == COND_EXPR
7649 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7650 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7651 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7652 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7653 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7654 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7655 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7657 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7658 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7659 || flag_syntax_only))
7660 tem = build1_loc (loc, code, type,
7662 TREE_TYPE (TREE_OPERAND
7663 (TREE_OPERAND (tem, 1), 0)),
7664 TREE_OPERAND (tem, 0),
7665 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7666 TREE_OPERAND (TREE_OPERAND (tem, 2),
7670 else if (COMPARISON_CLASS_P (arg0))
7672 if (TREE_CODE (type) == BOOLEAN_TYPE)
7674 arg0 = copy_node (arg0);
7675 TREE_TYPE (arg0) = type;
7678 else if (TREE_CODE (type) != INTEGER_TYPE)
7679 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7680 fold_build1_loc (loc, code, type,
7682 fold_build1_loc (loc, code, type,
7683 integer_zero_node));
7690 /* Re-association barriers around constants and other re-association
7691 barriers can be removed. */
7692 if (CONSTANT_CLASS_P (op0)
7693 || TREE_CODE (op0) == PAREN_EXPR)
7694 return fold_convert_loc (loc, type, op0);
7699 case FIX_TRUNC_EXPR:
7700 if (TREE_TYPE (op0) == type)
7703 /* If we have (type) (a CMP b) and type is an integral type, return
7704 new expression involving the new type. */
7705 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7706 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7707 TREE_OPERAND (op0, 1));
7709 /* Handle cases of two conversions in a row. */
7710 if (CONVERT_EXPR_P (op0))
7712 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7713 tree inter_type = TREE_TYPE (op0);
7714 int inside_int = INTEGRAL_TYPE_P (inside_type);
7715 int inside_ptr = POINTER_TYPE_P (inside_type);
7716 int inside_float = FLOAT_TYPE_P (inside_type);
7717 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7718 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7719 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7720 int inter_int = INTEGRAL_TYPE_P (inter_type);
7721 int inter_ptr = POINTER_TYPE_P (inter_type);
7722 int inter_float = FLOAT_TYPE_P (inter_type);
7723 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7724 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7725 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7726 int final_int = INTEGRAL_TYPE_P (type);
7727 int final_ptr = POINTER_TYPE_P (type);
7728 int final_float = FLOAT_TYPE_P (type);
7729 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7730 unsigned int final_prec = TYPE_PRECISION (type);
7731 int final_unsignedp = TYPE_UNSIGNED (type);
7733 /* In addition to the cases of two conversions in a row
7734 handled below, if we are converting something to its own
7735 type via an object of identical or wider precision, neither
7736 conversion is needed. */
7737 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7738 && (((inter_int || inter_ptr) && final_int)
7739 || (inter_float && final_float))
7740 && inter_prec >= final_prec)
7741 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7743 /* Likewise, if the intermediate and initial types are either both
7744 float or both integer, we don't need the middle conversion if the
7745 former is wider than the latter and doesn't change the signedness
7746 (for integers). Avoid this if the final type is a pointer since
7747 then we sometimes need the middle conversion. Likewise if the
7748 final type has a precision not equal to the size of its mode. */
7749 if (((inter_int && inside_int)
7750 || (inter_float && inside_float)
7751 || (inter_vec && inside_vec))
7752 && inter_prec >= inside_prec
7753 && (inter_float || inter_vec
7754 || inter_unsignedp == inside_unsignedp)
7755 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7756 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7758 && (! final_vec || inter_prec == inside_prec))
7759 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7761 /* If we have a sign-extension of a zero-extended value, we can
7762 replace that by a single zero-extension. */
7763 if (inside_int && inter_int && final_int
7764 && inside_prec < inter_prec && inter_prec < final_prec
7765 && inside_unsignedp && !inter_unsignedp)
7766 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7768 /* Two conversions in a row are not needed unless:
7769 - some conversion is floating-point (overstrict for now), or
7770 - some conversion is a vector (overstrict for now), or
7771 - the intermediate type is narrower than both initial and
7773 - the intermediate type and innermost type differ in signedness,
7774 and the outermost type is wider than the intermediate, or
7775 - the initial type is a pointer type and the precisions of the
7776 intermediate and final types differ, or
7777 - the final type is a pointer type and the precisions of the
7778 initial and intermediate types differ. */
7779 if (! inside_float && ! inter_float && ! final_float
7780 && ! inside_vec && ! inter_vec && ! final_vec
7781 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7782 && ! (inside_int && inter_int
7783 && inter_unsignedp != inside_unsignedp
7784 && inter_prec < final_prec)
7785 && ((inter_unsignedp && inter_prec > inside_prec)
7786 == (final_unsignedp && final_prec > inter_prec))
7787 && ! (inside_ptr && inter_prec != final_prec)
7788 && ! (final_ptr && inside_prec != inter_prec)
7789 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7790 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7791 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7794 /* Handle (T *)&A.B.C for A being of type T and B and C
7795 living at offset zero. This occurs frequently in
7796 C++ upcasting and then accessing the base. */
7797 if (TREE_CODE (op0) == ADDR_EXPR
7798 && POINTER_TYPE_P (type)
7799 && handled_component_p (TREE_OPERAND (op0, 0)))
7801 HOST_WIDE_INT bitsize, bitpos;
7803 enum machine_mode mode;
7804 int unsignedp, volatilep;
7805 tree base = TREE_OPERAND (op0, 0);
7806 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7807 &mode, &unsignedp, &volatilep, false);
7808 /* If the reference was to a (constant) zero offset, we can use
7809 the address of the base if it has the same base type
7810 as the result type and the pointer type is unqualified. */
7811 if (! offset && bitpos == 0
7812 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7813 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7814 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7815 return fold_convert_loc (loc, type,
7816 build_fold_addr_expr_loc (loc, base));
7819 if (TREE_CODE (op0) == MODIFY_EXPR
7820 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7821 /* Detect assigning a bitfield. */
7822 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7824 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7826 /* Don't leave an assignment inside a conversion
7827 unless assigning a bitfield. */
7828 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7829 /* First do the assignment, then return converted constant. */
7830 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7831 TREE_NO_WARNING (tem) = 1;
7832 TREE_USED (tem) = 1;
7836 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7837 constants (if x has signed type, the sign bit cannot be set
7838 in c). This folds extension into the BIT_AND_EXPR.
7839 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7840 very likely don't have maximal range for their precision and this
7841 transformation effectively doesn't preserve non-maximal ranges. */
7842 if (TREE_CODE (type) == INTEGER_TYPE
7843 && TREE_CODE (op0) == BIT_AND_EXPR
7844 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7846 tree and_expr = op0;
7847 tree and0 = TREE_OPERAND (and_expr, 0);
7848 tree and1 = TREE_OPERAND (and_expr, 1);
7851 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7852 || (TYPE_PRECISION (type)
7853 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7855 else if (TYPE_PRECISION (TREE_TYPE (and1))
7856 <= HOST_BITS_PER_WIDE_INT
7857 && host_integerp (and1, 1))
7859 unsigned HOST_WIDE_INT cst;
7861 cst = tree_low_cst (and1, 1);
7862 cst &= (HOST_WIDE_INT) -1
7863 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7864 change = (cst == 0);
7865 #ifdef LOAD_EXTEND_OP
7867 && !flag_syntax_only
7868 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7871 tree uns = unsigned_type_for (TREE_TYPE (and0));
7872 and0 = fold_convert_loc (loc, uns, and0);
7873 and1 = fold_convert_loc (loc, uns, and1);
7879 tem = force_fit_type_double (type, tree_to_double_int (and1),
7880 0, TREE_OVERFLOW (and1));
7881 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7882 fold_convert_loc (loc, type, and0), tem);
7886 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7887 when one of the new casts will fold away. Conservatively we assume
7888 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7889 if (POINTER_TYPE_P (type)
7890 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7891 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7892 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7893 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7895 tree arg00 = TREE_OPERAND (arg0, 0);
7896 tree arg01 = TREE_OPERAND (arg0, 1);
7898 return fold_build2_loc (loc,
7899 TREE_CODE (arg0), type,
7900 fold_convert_loc (loc, type, arg00),
7901 fold_convert_loc (loc, sizetype, arg01));
7904 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7905 of the same precision, and X is an integer type not narrower than
7906 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7907 if (INTEGRAL_TYPE_P (type)
7908 && TREE_CODE (op0) == BIT_NOT_EXPR
7909 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7910 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7911 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7913 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7914 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7915 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7916 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7917 fold_convert_loc (loc, type, tem));
7920 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7921 type of X and Y (integer types only). */
7922 if (INTEGRAL_TYPE_P (type)
7923 && TREE_CODE (op0) == MULT_EXPR
7924 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7925 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7927 /* Be careful not to introduce new overflows. */
7929 if (TYPE_OVERFLOW_WRAPS (type))
7932 mult_type = unsigned_type_for (type);
7934 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7936 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7937 fold_convert_loc (loc, mult_type,
7938 TREE_OPERAND (op0, 0)),
7939 fold_convert_loc (loc, mult_type,
7940 TREE_OPERAND (op0, 1)));
7941 return fold_convert_loc (loc, type, tem);
7945 tem = fold_convert_const (code, type, op0);
7946 return tem ? tem : NULL_TREE;
7948 case ADDR_SPACE_CONVERT_EXPR:
7949 if (integer_zerop (arg0))
7950 return fold_convert_const (code, type, arg0);
7953 case FIXED_CONVERT_EXPR:
7954 tem = fold_convert_const (code, type, arg0);
7955 return tem ? tem : NULL_TREE;
7957 case VIEW_CONVERT_EXPR:
7958 if (TREE_TYPE (op0) == type)
7960 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7961 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7962 type, TREE_OPERAND (op0, 0));
7963 if (TREE_CODE (op0) == MEM_REF)
7964 return fold_build2_loc (loc, MEM_REF, type,
7965 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7967 /* For integral conversions with the same precision or pointer
7968 conversions use a NOP_EXPR instead. */
7969 if ((INTEGRAL_TYPE_P (type)
7970 || POINTER_TYPE_P (type))
7971 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7972 || POINTER_TYPE_P (TREE_TYPE (op0)))
7973 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7974 return fold_convert_loc (loc, type, op0);
7976 /* Strip inner integral conversions that do not change the precision. */
7977 if (CONVERT_EXPR_P (op0)
7978 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7979 || POINTER_TYPE_P (TREE_TYPE (op0)))
7980 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7981 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7982 && (TYPE_PRECISION (TREE_TYPE (op0))
7983 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7984 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7985 type, TREE_OPERAND (op0, 0));
7987 return fold_view_convert_expr (type, op0);
7990 tem = fold_negate_expr (loc, arg0);
7992 return fold_convert_loc (loc, type, tem);
7996 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7997 return fold_abs_const (arg0, type);
7998 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7999 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8000 /* Convert fabs((double)float) into (double)fabsf(float). */
8001 else if (TREE_CODE (arg0) == NOP_EXPR
8002 && TREE_CODE (type) == REAL_TYPE)
8004 tree targ0 = strip_float_extensions (arg0);
8006 return fold_convert_loc (loc, type,
8007 fold_build1_loc (loc, ABS_EXPR,
8011 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8012 else if (TREE_CODE (arg0) == ABS_EXPR)
8014 else if (tree_expr_nonnegative_p (arg0))
8017 /* Strip sign ops from argument. */
8018 if (TREE_CODE (type) == REAL_TYPE)
8020 tem = fold_strip_sign_ops (arg0);
8022 return fold_build1_loc (loc, ABS_EXPR, type,
8023 fold_convert_loc (loc, type, tem));
8028 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8029 return fold_convert_loc (loc, type, arg0);
8030 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8032 tree itype = TREE_TYPE (type);
8033 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8034 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8035 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8036 negate_expr (ipart));
8038 if (TREE_CODE (arg0) == COMPLEX_CST)
8040 tree itype = TREE_TYPE (type);
8041 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8042 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8043 return build_complex (type, rpart, negate_expr (ipart));
8045 if (TREE_CODE (arg0) == CONJ_EXPR)
8046 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8050 if (TREE_CODE (arg0) == INTEGER_CST)
8051 return fold_not_const (arg0, type);
8052 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8053 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8054 /* Convert ~ (-A) to A - 1. */
8055 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8056 return fold_build2_loc (loc, MINUS_EXPR, type,
8057 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8058 build_int_cst (type, 1));
8059 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8060 else if (INTEGRAL_TYPE_P (type)
8061 && ((TREE_CODE (arg0) == MINUS_EXPR
8062 && integer_onep (TREE_OPERAND (arg0, 1)))
8063 || (TREE_CODE (arg0) == PLUS_EXPR
8064 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8065 return fold_build1_loc (loc, NEGATE_EXPR, type,
8066 fold_convert_loc (loc, type,
8067 TREE_OPERAND (arg0, 0)));
8068 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8069 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8070 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8071 fold_convert_loc (loc, type,
8072 TREE_OPERAND (arg0, 0)))))
8073 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8074 fold_convert_loc (loc, type,
8075 TREE_OPERAND (arg0, 1)));
8076 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8077 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8078 fold_convert_loc (loc, type,
8079 TREE_OPERAND (arg0, 1)))))
8080 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8081 fold_convert_loc (loc, type,
8082 TREE_OPERAND (arg0, 0)), tem);
8083 /* Perform BIT_NOT_EXPR on each element individually. */
8084 else if (TREE_CODE (arg0) == VECTOR_CST)
8086 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8087 int count = TYPE_VECTOR_SUBPARTS (type), i;
8089 for (i = 0; i < count; i++)
8093 elem = TREE_VALUE (elements);
8094 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8095 if (elem == NULL_TREE)
8097 elements = TREE_CHAIN (elements);
8100 elem = build_int_cst (TREE_TYPE (type), -1);
8101 list = tree_cons (NULL_TREE, elem, list);
8104 return build_vector (type, nreverse (list));
8109 case TRUTH_NOT_EXPR:
8110 /* The argument to invert_truthvalue must have Boolean type. */
8111 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8112 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8114 /* Note that the operand of this must be an int
8115 and its values must be 0 or 1.
8116 ("true" is a fixed value perhaps depending on the language,
8117 but we don't handle values other than 1 correctly yet.) */
8118 tem = fold_truth_not_expr (loc, arg0);
8121 return fold_convert_loc (loc, type, tem);
8124 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8125 return fold_convert_loc (loc, type, arg0);
8126 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8127 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8128 TREE_OPERAND (arg0, 1));
8129 if (TREE_CODE (arg0) == COMPLEX_CST)
8130 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8131 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8133 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8134 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8135 fold_build1_loc (loc, REALPART_EXPR, itype,
8136 TREE_OPERAND (arg0, 0)),
8137 fold_build1_loc (loc, REALPART_EXPR, itype,
8138 TREE_OPERAND (arg0, 1)));
8139 return fold_convert_loc (loc, type, tem);
8141 if (TREE_CODE (arg0) == CONJ_EXPR)
8143 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8144 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8145 TREE_OPERAND (arg0, 0));
8146 return fold_convert_loc (loc, type, tem);
8148 if (TREE_CODE (arg0) == CALL_EXPR)
8150 tree fn = get_callee_fndecl (arg0);
8151 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8152 switch (DECL_FUNCTION_CODE (fn))
8154 CASE_FLT_FN (BUILT_IN_CEXPI):
8155 fn = mathfn_built_in (type, BUILT_IN_COS);
8157 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8167 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8168 return build_zero_cst (type);
8169 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8170 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8171 TREE_OPERAND (arg0, 0));
8172 if (TREE_CODE (arg0) == COMPLEX_CST)
8173 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8174 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8176 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8177 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8178 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8179 TREE_OPERAND (arg0, 0)),
8180 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8181 TREE_OPERAND (arg0, 1)));
8182 return fold_convert_loc (loc, type, tem);
8184 if (TREE_CODE (arg0) == CONJ_EXPR)
8186 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8187 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8188 return fold_convert_loc (loc, type, negate_expr (tem));
8190 if (TREE_CODE (arg0) == CALL_EXPR)
8192 tree fn = get_callee_fndecl (arg0);
8193 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8194 switch (DECL_FUNCTION_CODE (fn))
8196 CASE_FLT_FN (BUILT_IN_CEXPI):
8197 fn = mathfn_built_in (type, BUILT_IN_SIN);
8199 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8209 /* Fold *&X to X if X is an lvalue. */
8210 if (TREE_CODE (op0) == ADDR_EXPR)
8212 tree op00 = TREE_OPERAND (op0, 0);
8213 if ((TREE_CODE (op00) == VAR_DECL
8214 || TREE_CODE (op00) == PARM_DECL
8215 || TREE_CODE (op00) == RESULT_DECL)
8216 && !TREE_READONLY (op00))
8223 } /* switch (code) */
8227 /* If the operation was a conversion do _not_ mark a resulting constant
8228 with TREE_OVERFLOW if the original constant was not. These conversions
8229 have implementation defined behavior and retaining the TREE_OVERFLOW
8230 flag here would confuse later passes such as VRP. */
8232 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8233 tree type, tree op0)
8235 tree res = fold_unary_loc (loc, code, type, op0);
8237 && TREE_CODE (res) == INTEGER_CST
8238 && TREE_CODE (op0) == INTEGER_CST
8239 && CONVERT_EXPR_CODE_P (code))
8240 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8245 /* Fold a binary expression of code CODE and type TYPE with operands
8246 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8247 Return the folded expression if folding is successful. Otherwise,
8248 return NULL_TREE. */
8251 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8253 enum tree_code compl_code;
8255 if (code == MIN_EXPR)
8256 compl_code = MAX_EXPR;
8257 else if (code == MAX_EXPR)
8258 compl_code = MIN_EXPR;
8262 /* MIN (MAX (a, b), b) == b. */
8263 if (TREE_CODE (op0) == compl_code
8264 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8265 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8267 /* MIN (MAX (b, a), b) == b. */
8268 if (TREE_CODE (op0) == compl_code
8269 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8270 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8271 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8273 /* MIN (a, MAX (a, b)) == a. */
8274 if (TREE_CODE (op1) == compl_code
8275 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8276 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8277 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8279 /* MIN (a, MAX (b, a)) == a. */
8280 if (TREE_CODE (op1) == compl_code
8281 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8282 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8283 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8288 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8289 by changing CODE to reduce the magnitude of constants involved in
8290 ARG0 of the comparison.
8291 Returns a canonicalized comparison tree if a simplification was
8292 possible, otherwise returns NULL_TREE.
8293 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8294 valid if signed overflow is undefined. */
8297 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8298 tree arg0, tree arg1,
8299 bool *strict_overflow_p)
8301 enum tree_code code0 = TREE_CODE (arg0);
8302 tree t, cst0 = NULL_TREE;
8306 /* Match A +- CST code arg1 and CST code arg1. We can change the
8307 first form only if overflow is undefined. */
8308 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8309 /* In principle pointers also have undefined overflow behavior,
8310 but that causes problems elsewhere. */
8311 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8312 && (code0 == MINUS_EXPR
8313 || code0 == PLUS_EXPR)
8314 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8315 || code0 == INTEGER_CST))
8318 /* Identify the constant in arg0 and its sign. */
8319 if (code0 == INTEGER_CST)
8322 cst0 = TREE_OPERAND (arg0, 1);
8323 sgn0 = tree_int_cst_sgn (cst0);
8325 /* Overflowed constants and zero will cause problems. */
8326 if (integer_zerop (cst0)
8327 || TREE_OVERFLOW (cst0))
8330 /* See if we can reduce the magnitude of the constant in
8331 arg0 by changing the comparison code. */
8332 if (code0 == INTEGER_CST)
8334 /* CST <= arg1 -> CST-1 < arg1. */
8335 if (code == LE_EXPR && sgn0 == 1)
8337 /* -CST < arg1 -> -CST-1 <= arg1. */
8338 else if (code == LT_EXPR && sgn0 == -1)
8340 /* CST > arg1 -> CST-1 >= arg1. */
8341 else if (code == GT_EXPR && sgn0 == 1)
8343 /* -CST >= arg1 -> -CST-1 > arg1. */
8344 else if (code == GE_EXPR && sgn0 == -1)
8348 /* arg1 code' CST' might be more canonical. */
8353 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8355 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8357 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8358 else if (code == GT_EXPR
8359 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8361 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8362 else if (code == LE_EXPR
8363 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8365 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8366 else if (code == GE_EXPR
8367 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8371 *strict_overflow_p = true;
8374 /* Now build the constant reduced in magnitude. But not if that
8375 would produce one outside of its types range. */
8376 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8378 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8379 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8381 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8382 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8383 /* We cannot swap the comparison here as that would cause us to
8384 endlessly recurse. */
8387 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8388 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8389 if (code0 != INTEGER_CST)
8390 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8392 /* If swapping might yield to a more canonical form, do so. */
8394 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8396 return fold_build2_loc (loc, code, type, t, arg1);
8399 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8400 overflow further. Try to decrease the magnitude of constants involved
8401 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8402 and put sole constants at the second argument position.
8403 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8406 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8407 tree arg0, tree arg1)
8410 bool strict_overflow_p;
8411 const char * const warnmsg = G_("assuming signed overflow does not occur "
8412 "when reducing constant in comparison");
8414 /* Try canonicalization by simplifying arg0. */
8415 strict_overflow_p = false;
8416 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8417 &strict_overflow_p);
8420 if (strict_overflow_p)
8421 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8425 /* Try canonicalization by simplifying arg1 using the swapped
8427 code = swap_tree_comparison (code);
8428 strict_overflow_p = false;
8429 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8430 &strict_overflow_p);
8431 if (t && strict_overflow_p)
8432 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8436 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8437 space. This is used to avoid issuing overflow warnings for
8438 expressions like &p->x which can not wrap. */
8441 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8443 unsigned HOST_WIDE_INT offset_low, total_low;
8444 HOST_WIDE_INT size, offset_high, total_high;
8446 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8452 if (offset == NULL_TREE)
8457 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8461 offset_low = TREE_INT_CST_LOW (offset);
8462 offset_high = TREE_INT_CST_HIGH (offset);
8465 if (add_double_with_sign (offset_low, offset_high,
8466 bitpos / BITS_PER_UNIT, 0,
8467 &total_low, &total_high,
8471 if (total_high != 0)
8474 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8478 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8480 if (TREE_CODE (base) == ADDR_EXPR)
8482 HOST_WIDE_INT base_size;
8484 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8485 if (base_size > 0 && size < base_size)
8489 return total_low > (unsigned HOST_WIDE_INT) size;
8492 /* Subroutine of fold_binary. This routine performs all of the
8493 transformations that are common to the equality/inequality
8494 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8495 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8496 fold_binary should call fold_binary. Fold a comparison with
8497 tree code CODE and type TYPE with operands OP0 and OP1. Return
8498 the folded comparison or NULL_TREE. */
8501 fold_comparison (location_t loc, enum tree_code code, tree type,
8504 tree arg0, arg1, tem;
8509 STRIP_SIGN_NOPS (arg0);
8510 STRIP_SIGN_NOPS (arg1);
8512 tem = fold_relational_const (code, type, arg0, arg1);
8513 if (tem != NULL_TREE)
8516 /* If one arg is a real or integer constant, put it last. */
8517 if (tree_swap_operands_p (arg0, arg1, true))
8518 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8520 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8521 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8522 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8523 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8524 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8525 && (TREE_CODE (arg1) == INTEGER_CST
8526 && !TREE_OVERFLOW (arg1)))
8528 tree const1 = TREE_OPERAND (arg0, 1);
8530 tree variable = TREE_OPERAND (arg0, 0);
8533 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8535 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8536 TREE_TYPE (arg1), const2, const1);
8538 /* If the constant operation overflowed this can be
8539 simplified as a comparison against INT_MAX/INT_MIN. */
8540 if (TREE_CODE (lhs) == INTEGER_CST
8541 && TREE_OVERFLOW (lhs))
8543 int const1_sgn = tree_int_cst_sgn (const1);
8544 enum tree_code code2 = code;
8546 /* Get the sign of the constant on the lhs if the
8547 operation were VARIABLE + CONST1. */
8548 if (TREE_CODE (arg0) == MINUS_EXPR)
8549 const1_sgn = -const1_sgn;
8551 /* The sign of the constant determines if we overflowed
8552 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8553 Canonicalize to the INT_MIN overflow by swapping the comparison
8555 if (const1_sgn == -1)
8556 code2 = swap_tree_comparison (code);
8558 /* We now can look at the canonicalized case
8559 VARIABLE + 1 CODE2 INT_MIN
8560 and decide on the result. */
8561 if (code2 == LT_EXPR
8563 || code2 == EQ_EXPR)
8564 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8565 else if (code2 == NE_EXPR
8567 || code2 == GT_EXPR)
8568 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8571 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8572 && (TREE_CODE (lhs) != INTEGER_CST
8573 || !TREE_OVERFLOW (lhs)))
8575 if (code != EQ_EXPR && code != NE_EXPR)
8576 fold_overflow_warning ("assuming signed overflow does not occur "
8577 "when changing X +- C1 cmp C2 to "
8579 WARN_STRICT_OVERFLOW_COMPARISON);
8580 return fold_build2_loc (loc, code, type, variable, lhs);
8584 /* For comparisons of pointers we can decompose it to a compile time
8585 comparison of the base objects and the offsets into the object.
8586 This requires at least one operand being an ADDR_EXPR or a
8587 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8588 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8589 && (TREE_CODE (arg0) == ADDR_EXPR
8590 || TREE_CODE (arg1) == ADDR_EXPR
8591 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8592 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8594 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8595 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8596 enum machine_mode mode;
8597 int volatilep, unsignedp;
8598 bool indirect_base0 = false, indirect_base1 = false;
8600 /* Get base and offset for the access. Strip ADDR_EXPR for
8601 get_inner_reference, but put it back by stripping INDIRECT_REF
8602 off the base object if possible. indirect_baseN will be true
8603 if baseN is not an address but refers to the object itself. */
8605 if (TREE_CODE (arg0) == ADDR_EXPR)
8607 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8608 &bitsize, &bitpos0, &offset0, &mode,
8609 &unsignedp, &volatilep, false);
8610 if (TREE_CODE (base0) == INDIRECT_REF)
8611 base0 = TREE_OPERAND (base0, 0);
8613 indirect_base0 = true;
8615 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8617 base0 = TREE_OPERAND (arg0, 0);
8618 STRIP_SIGN_NOPS (base0);
8619 if (TREE_CODE (base0) == ADDR_EXPR)
8621 base0 = TREE_OPERAND (base0, 0);
8622 indirect_base0 = true;
8624 offset0 = TREE_OPERAND (arg0, 1);
8628 if (TREE_CODE (arg1) == ADDR_EXPR)
8630 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8631 &bitsize, &bitpos1, &offset1, &mode,
8632 &unsignedp, &volatilep, false);
8633 if (TREE_CODE (base1) == INDIRECT_REF)
8634 base1 = TREE_OPERAND (base1, 0);
8636 indirect_base1 = true;
8638 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8640 base1 = TREE_OPERAND (arg1, 0);
8641 STRIP_SIGN_NOPS (base1);
8642 if (TREE_CODE (base1) == ADDR_EXPR)
8644 base1 = TREE_OPERAND (base1, 0);
8645 indirect_base1 = true;
8647 offset1 = TREE_OPERAND (arg1, 1);
8650 /* A local variable can never be pointed to by
8651 the default SSA name of an incoming parameter. */
8652 if ((TREE_CODE (arg0) == ADDR_EXPR
8654 && TREE_CODE (base0) == VAR_DECL
8655 && auto_var_in_fn_p (base0, current_function_decl)
8657 && TREE_CODE (base1) == SSA_NAME
8658 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8659 && SSA_NAME_IS_DEFAULT_DEF (base1))
8660 || (TREE_CODE (arg1) == ADDR_EXPR
8662 && TREE_CODE (base1) == VAR_DECL
8663 && auto_var_in_fn_p (base1, current_function_decl)
8665 && TREE_CODE (base0) == SSA_NAME
8666 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8667 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8669 if (code == NE_EXPR)
8670 return constant_boolean_node (1, type);
8671 else if (code == EQ_EXPR)
8672 return constant_boolean_node (0, type);
8674 /* If we have equivalent bases we might be able to simplify. */
8675 else if (indirect_base0 == indirect_base1
8676 && operand_equal_p (base0, base1, 0))
8678 /* We can fold this expression to a constant if the non-constant
8679 offset parts are equal. */
8680 if ((offset0 == offset1
8681 || (offset0 && offset1
8682 && operand_equal_p (offset0, offset1, 0)))
8685 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8690 && bitpos0 != bitpos1
8691 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8692 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8693 fold_overflow_warning (("assuming pointer wraparound does not "
8694 "occur when comparing P +- C1 with "
8696 WARN_STRICT_OVERFLOW_CONDITIONAL);
8701 return constant_boolean_node (bitpos0 == bitpos1, type);
8703 return constant_boolean_node (bitpos0 != bitpos1, type);
8705 return constant_boolean_node (bitpos0 < bitpos1, type);
8707 return constant_boolean_node (bitpos0 <= bitpos1, type);
8709 return constant_boolean_node (bitpos0 >= bitpos1, type);
8711 return constant_boolean_node (bitpos0 > bitpos1, type);
8715 /* We can simplify the comparison to a comparison of the variable
8716 offset parts if the constant offset parts are equal.
8717 Be careful to use signed size type here because otherwise we
8718 mess with array offsets in the wrong way. This is possible
8719 because pointer arithmetic is restricted to retain within an
8720 object and overflow on pointer differences is undefined as of
8721 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8722 else if (bitpos0 == bitpos1
8723 && ((code == EQ_EXPR || code == NE_EXPR)
8724 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8726 /* By converting to signed size type we cover middle-end pointer
8727 arithmetic which operates on unsigned pointer types of size
8728 type size and ARRAY_REF offsets which are properly sign or
8729 zero extended from their type in case it is narrower than
8731 if (offset0 == NULL_TREE)
8732 offset0 = build_int_cst (ssizetype, 0);
8734 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8735 if (offset1 == NULL_TREE)
8736 offset1 = build_int_cst (ssizetype, 0);
8738 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8742 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8743 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8744 fold_overflow_warning (("assuming pointer wraparound does not "
8745 "occur when comparing P +- C1 with "
8747 WARN_STRICT_OVERFLOW_COMPARISON);
8749 return fold_build2_loc (loc, code, type, offset0, offset1);
8752 /* For non-equal bases we can simplify if they are addresses
8753 of local binding decls or constants. */
8754 else if (indirect_base0 && indirect_base1
8755 /* We know that !operand_equal_p (base0, base1, 0)
8756 because the if condition was false. But make
8757 sure two decls are not the same. */
8759 && TREE_CODE (arg0) == ADDR_EXPR
8760 && TREE_CODE (arg1) == ADDR_EXPR
8761 && (((TREE_CODE (base0) == VAR_DECL
8762 || TREE_CODE (base0) == PARM_DECL)
8763 && (targetm.binds_local_p (base0)
8764 || CONSTANT_CLASS_P (base1)))
8765 || CONSTANT_CLASS_P (base0))
8766 && (((TREE_CODE (base1) == VAR_DECL
8767 || TREE_CODE (base1) == PARM_DECL)
8768 && (targetm.binds_local_p (base1)
8769 || CONSTANT_CLASS_P (base0)))
8770 || CONSTANT_CLASS_P (base1)))
8772 if (code == EQ_EXPR)
8773 return omit_two_operands_loc (loc, type, boolean_false_node,
8775 else if (code == NE_EXPR)
8776 return omit_two_operands_loc (loc, type, boolean_true_node,
8779 /* For equal offsets we can simplify to a comparison of the
8781 else if (bitpos0 == bitpos1
8783 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8785 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8786 && ((offset0 == offset1)
8787 || (offset0 && offset1
8788 && operand_equal_p (offset0, offset1, 0))))
8791 base0 = build_fold_addr_expr_loc (loc, base0);
8793 base1 = build_fold_addr_expr_loc (loc, base1);
8794 return fold_build2_loc (loc, code, type, base0, base1);
8798 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8799 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8800 the resulting offset is smaller in absolute value than the
8802 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8803 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8804 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8805 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8806 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8807 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8808 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8810 tree const1 = TREE_OPERAND (arg0, 1);
8811 tree const2 = TREE_OPERAND (arg1, 1);
8812 tree variable1 = TREE_OPERAND (arg0, 0);
8813 tree variable2 = TREE_OPERAND (arg1, 0);
8815 const char * const warnmsg = G_("assuming signed overflow does not "
8816 "occur when combining constants around "
8819 /* Put the constant on the side where it doesn't overflow and is
8820 of lower absolute value than before. */
8821 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8822 ? MINUS_EXPR : PLUS_EXPR,
8824 if (!TREE_OVERFLOW (cst)
8825 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8827 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8828 return fold_build2_loc (loc, code, type,
8830 fold_build2_loc (loc,
8831 TREE_CODE (arg1), TREE_TYPE (arg1),
8835 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8836 ? MINUS_EXPR : PLUS_EXPR,
8838 if (!TREE_OVERFLOW (cst)
8839 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8841 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8842 return fold_build2_loc (loc, code, type,
8843 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8849 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8850 signed arithmetic case. That form is created by the compiler
8851 often enough for folding it to be of value. One example is in
8852 computing loop trip counts after Operator Strength Reduction. */
8853 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8854 && TREE_CODE (arg0) == MULT_EXPR
8855 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8856 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8857 && integer_zerop (arg1))
8859 tree const1 = TREE_OPERAND (arg0, 1);
8860 tree const2 = arg1; /* zero */
8861 tree variable1 = TREE_OPERAND (arg0, 0);
8862 enum tree_code cmp_code = code;
8864 /* Handle unfolded multiplication by zero. */
8865 if (integer_zerop (const1))
8866 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8868 fold_overflow_warning (("assuming signed overflow does not occur when "
8869 "eliminating multiplication in comparison "
8871 WARN_STRICT_OVERFLOW_COMPARISON);
8873 /* If const1 is negative we swap the sense of the comparison. */
8874 if (tree_int_cst_sgn (const1) < 0)
8875 cmp_code = swap_tree_comparison (cmp_code);
8877 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
8880 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
8884 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8886 tree targ0 = strip_float_extensions (arg0);
8887 tree targ1 = strip_float_extensions (arg1);
8888 tree newtype = TREE_TYPE (targ0);
8890 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8891 newtype = TREE_TYPE (targ1);
8893 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8894 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8895 return fold_build2_loc (loc, code, type,
8896 fold_convert_loc (loc, newtype, targ0),
8897 fold_convert_loc (loc, newtype, targ1));
8899 /* (-a) CMP (-b) -> b CMP a */
8900 if (TREE_CODE (arg0) == NEGATE_EXPR
8901 && TREE_CODE (arg1) == NEGATE_EXPR)
8902 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8903 TREE_OPERAND (arg0, 0));
8905 if (TREE_CODE (arg1) == REAL_CST)
8907 REAL_VALUE_TYPE cst;
8908 cst = TREE_REAL_CST (arg1);
8910 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8911 if (TREE_CODE (arg0) == NEGATE_EXPR)
8912 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8913 TREE_OPERAND (arg0, 0),
8914 build_real (TREE_TYPE (arg1),
8915 real_value_negate (&cst)));
8917 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8918 /* a CMP (-0) -> a CMP 0 */
8919 if (REAL_VALUE_MINUS_ZERO (cst))
8920 return fold_build2_loc (loc, code, type, arg0,
8921 build_real (TREE_TYPE (arg1), dconst0));
8923 /* x != NaN is always true, other ops are always false. */
8924 if (REAL_VALUE_ISNAN (cst)
8925 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8927 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8928 return omit_one_operand_loc (loc, type, tem, arg0);
8931 /* Fold comparisons against infinity. */
8932 if (REAL_VALUE_ISINF (cst)
8933 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
8935 tem = fold_inf_compare (loc, code, type, arg0, arg1);
8936 if (tem != NULL_TREE)
8941 /* If this is a comparison of a real constant with a PLUS_EXPR
8942 or a MINUS_EXPR of a real constant, we can convert it into a
8943 comparison with a revised real constant as long as no overflow
8944 occurs when unsafe_math_optimizations are enabled. */
8945 if (flag_unsafe_math_optimizations
8946 && TREE_CODE (arg1) == REAL_CST
8947 && (TREE_CODE (arg0) == PLUS_EXPR
8948 || TREE_CODE (arg0) == MINUS_EXPR)
8949 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8950 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8951 ? MINUS_EXPR : PLUS_EXPR,
8952 arg1, TREE_OPERAND (arg0, 1)))
8953 && !TREE_OVERFLOW (tem))
8954 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8956 /* Likewise, we can simplify a comparison of a real constant with
8957 a MINUS_EXPR whose first operand is also a real constant, i.e.
8958 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8959 floating-point types only if -fassociative-math is set. */
8960 if (flag_associative_math
8961 && TREE_CODE (arg1) == REAL_CST
8962 && TREE_CODE (arg0) == MINUS_EXPR
8963 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8964 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8966 && !TREE_OVERFLOW (tem))
8967 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8968 TREE_OPERAND (arg0, 1), tem);
8970 /* Fold comparisons against built-in math functions. */
8971 if (TREE_CODE (arg1) == REAL_CST
8972 && flag_unsafe_math_optimizations
8973 && ! flag_errno_math)
8975 enum built_in_function fcode = builtin_mathfn_code (arg0);
8977 if (fcode != END_BUILTINS)
8979 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
8980 if (tem != NULL_TREE)
8986 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8987 && CONVERT_EXPR_P (arg0))
8989 /* If we are widening one operand of an integer comparison,
8990 see if the other operand is similarly being widened. Perhaps we
8991 can do the comparison in the narrower type. */
8992 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
8996 /* Or if we are changing signedness. */
8997 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9002 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9003 constant, we can simplify it. */
9004 if (TREE_CODE (arg1) == INTEGER_CST
9005 && (TREE_CODE (arg0) == MIN_EXPR
9006 || TREE_CODE (arg0) == MAX_EXPR)
9007 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9009 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9014 /* Simplify comparison of something with itself. (For IEEE
9015 floating-point, we can only do some of these simplifications.) */
9016 if (operand_equal_p (arg0, arg1, 0))
9021 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9022 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9023 return constant_boolean_node (1, type);
9028 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9029 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9030 return constant_boolean_node (1, type);
9031 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9034 /* For NE, we can only do this simplification if integer
9035 or we don't honor IEEE floating point NaNs. */
9036 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9037 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9039 /* ... fall through ... */
9042 return constant_boolean_node (0, type);
9048 /* If we are comparing an expression that just has comparisons
9049 of two integer values, arithmetic expressions of those comparisons,
9050 and constants, we can simplify it. There are only three cases
9051 to check: the two values can either be equal, the first can be
9052 greater, or the second can be greater. Fold the expression for
9053 those three values. Since each value must be 0 or 1, we have
9054 eight possibilities, each of which corresponds to the constant 0
9055 or 1 or one of the six possible comparisons.
9057 This handles common cases like (a > b) == 0 but also handles
9058 expressions like ((x > y) - (y > x)) > 0, which supposedly
9059 occur in macroized code. */
9061 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9063 tree cval1 = 0, cval2 = 0;
9066 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9067 /* Don't handle degenerate cases here; they should already
9068 have been handled anyway. */
9069 && cval1 != 0 && cval2 != 0
9070 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9071 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9072 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9073 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9074 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9075 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9076 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9078 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9079 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9081 /* We can't just pass T to eval_subst in case cval1 or cval2
9082 was the same as ARG1. */
9085 = fold_build2_loc (loc, code, type,
9086 eval_subst (loc, arg0, cval1, maxval,
9090 = fold_build2_loc (loc, code, type,
9091 eval_subst (loc, arg0, cval1, maxval,
9095 = fold_build2_loc (loc, code, type,
9096 eval_subst (loc, arg0, cval1, minval,
9100 /* All three of these results should be 0 or 1. Confirm they are.
9101 Then use those values to select the proper code to use. */
9103 if (TREE_CODE (high_result) == INTEGER_CST
9104 && TREE_CODE (equal_result) == INTEGER_CST
9105 && TREE_CODE (low_result) == INTEGER_CST)
9107 /* Make a 3-bit mask with the high-order bit being the
9108 value for `>', the next for '=', and the low for '<'. */
9109 switch ((integer_onep (high_result) * 4)
9110 + (integer_onep (equal_result) * 2)
9111 + integer_onep (low_result))
9115 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9136 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9141 tem = save_expr (build2 (code, type, cval1, cval2));
9142 SET_EXPR_LOCATION (tem, loc);
9145 return fold_build2_loc (loc, code, type, cval1, cval2);
9150 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9151 into a single range test. */
9152 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9153 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9154 && TREE_CODE (arg1) == INTEGER_CST
9155 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9156 && !integer_zerop (TREE_OPERAND (arg0, 1))
9157 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9158 && !TREE_OVERFLOW (arg1))
9160 tem = fold_div_compare (loc, code, type, arg0, arg1);
9161 if (tem != NULL_TREE)
9165 /* Fold ~X op ~Y as Y op X. */
9166 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9167 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9169 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9170 return fold_build2_loc (loc, code, type,
9171 fold_convert_loc (loc, cmp_type,
9172 TREE_OPERAND (arg1, 0)),
9173 TREE_OPERAND (arg0, 0));
9176 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9177 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9178 && TREE_CODE (arg1) == INTEGER_CST)
9180 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9181 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9182 TREE_OPERAND (arg0, 0),
9183 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9184 fold_convert_loc (loc, cmp_type, arg1)));
9191 /* Subroutine of fold_binary. Optimize complex multiplications of the
9192 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9193 argument EXPR represents the expression "z" of type TYPE. */
9196 fold_mult_zconjz (location_t loc, tree type, tree expr)
9198 tree itype = TREE_TYPE (type);
9199 tree rpart, ipart, tem;
9201 if (TREE_CODE (expr) == COMPLEX_EXPR)
9203 rpart = TREE_OPERAND (expr, 0);
9204 ipart = TREE_OPERAND (expr, 1);
9206 else if (TREE_CODE (expr) == COMPLEX_CST)
9208 rpart = TREE_REALPART (expr);
9209 ipart = TREE_IMAGPART (expr);
9213 expr = save_expr (expr);
9214 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9215 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9218 rpart = save_expr (rpart);
9219 ipart = save_expr (ipart);
9220 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9221 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9222 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9223 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9224 build_zero_cst (itype));
9228 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9229 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9230 guarantees that P and N have the same least significant log2(M) bits.
9231 N is not otherwise constrained. In particular, N is not normalized to
9232 0 <= N < M as is common. In general, the precise value of P is unknown.
9233 M is chosen as large as possible such that constant N can be determined.
9235 Returns M and sets *RESIDUE to N.
9237 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9238 account. This is not always possible due to PR 35705.
9241 static unsigned HOST_WIDE_INT
9242 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9243 bool allow_func_align)
9245 enum tree_code code;
9249 code = TREE_CODE (expr);
9250 if (code == ADDR_EXPR)
9252 expr = TREE_OPERAND (expr, 0);
9253 if (handled_component_p (expr))
9255 HOST_WIDE_INT bitsize, bitpos;
9257 enum machine_mode mode;
9258 int unsignedp, volatilep;
9260 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9261 &mode, &unsignedp, &volatilep, false);
9262 *residue = bitpos / BITS_PER_UNIT;
9265 if (TREE_CODE (offset) == INTEGER_CST)
9266 *residue += TREE_INT_CST_LOW (offset);
9268 /* We don't handle more complicated offset expressions. */
9274 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9275 return DECL_ALIGN_UNIT (expr);
9277 else if (code == POINTER_PLUS_EXPR)
9280 unsigned HOST_WIDE_INT modulus;
9281 enum tree_code inner_code;
9283 op0 = TREE_OPERAND (expr, 0);
9285 modulus = get_pointer_modulus_and_residue (op0, residue,
9288 op1 = TREE_OPERAND (expr, 1);
9290 inner_code = TREE_CODE (op1);
9291 if (inner_code == INTEGER_CST)
9293 *residue += TREE_INT_CST_LOW (op1);
9296 else if (inner_code == MULT_EXPR)
9298 op1 = TREE_OPERAND (op1, 1);
9299 if (TREE_CODE (op1) == INTEGER_CST)
9301 unsigned HOST_WIDE_INT align;
9303 /* Compute the greatest power-of-2 divisor of op1. */
9304 align = TREE_INT_CST_LOW (op1);
9307 /* If align is non-zero and less than *modulus, replace
9308 *modulus with align., If align is 0, then either op1 is 0
9309 or the greatest power-of-2 divisor of op1 doesn't fit in an
9310 unsigned HOST_WIDE_INT. In either case, no additional
9311 constraint is imposed. */
9313 modulus = MIN (modulus, align);
9320 /* If we get here, we were unable to determine anything useful about the
9326 /* Fold a binary expression of code CODE and type TYPE with operands
9327 OP0 and OP1. LOC is the location of the resulting expression.
9328 Return the folded expression if folding is successful. Otherwise,
9329 return NULL_TREE. */
9332 fold_binary_loc (location_t loc,
9333 enum tree_code code, tree type, tree op0, tree op1)
9335 enum tree_code_class kind = TREE_CODE_CLASS (code);
9336 tree arg0, arg1, tem;
9337 tree t1 = NULL_TREE;
9338 bool strict_overflow_p;
9340 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9341 && TREE_CODE_LENGTH (code) == 2
9343 && op1 != NULL_TREE);
9348 /* Strip any conversions that don't change the mode. This is
9349 safe for every expression, except for a comparison expression
9350 because its signedness is derived from its operands. So, in
9351 the latter case, only strip conversions that don't change the
9352 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9355 Note that this is done as an internal manipulation within the
9356 constant folder, in order to find the simplest representation
9357 of the arguments so that their form can be studied. In any
9358 cases, the appropriate type conversions should be put back in
9359 the tree that will get out of the constant folder. */
9361 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9363 STRIP_SIGN_NOPS (arg0);
9364 STRIP_SIGN_NOPS (arg1);
9372 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9373 constant but we can't do arithmetic on them. */
9374 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9375 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9376 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9377 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9378 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9379 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9381 if (kind == tcc_binary)
9383 /* Make sure type and arg0 have the same saturating flag. */
9384 gcc_assert (TYPE_SATURATING (type)
9385 == TYPE_SATURATING (TREE_TYPE (arg0)));
9386 tem = const_binop (code, arg0, arg1);
9388 else if (kind == tcc_comparison)
9389 tem = fold_relational_const (code, type, arg0, arg1);
9393 if (tem != NULL_TREE)
9395 if (TREE_TYPE (tem) != type)
9396 tem = fold_convert_loc (loc, type, tem);
9401 /* If this is a commutative operation, and ARG0 is a constant, move it
9402 to ARG1 to reduce the number of tests below. */
9403 if (commutative_tree_code (code)
9404 && tree_swap_operands_p (arg0, arg1, true))
9405 return fold_build2_loc (loc, code, type, op1, op0);
9407 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9409 First check for cases where an arithmetic operation is applied to a
9410 compound, conditional, or comparison operation. Push the arithmetic
9411 operation inside the compound or conditional to see if any folding
9412 can then be done. Convert comparison to conditional for this purpose.
9413 The also optimizes non-constant cases that used to be done in
9416 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9417 one of the operands is a comparison and the other is a comparison, a
9418 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9419 code below would make the expression more complex. Change it to a
9420 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9421 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9423 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9424 || code == EQ_EXPR || code == NE_EXPR)
9425 && ((truth_value_p (TREE_CODE (arg0))
9426 && (truth_value_p (TREE_CODE (arg1))
9427 || (TREE_CODE (arg1) == BIT_AND_EXPR
9428 && integer_onep (TREE_OPERAND (arg1, 1)))))
9429 || (truth_value_p (TREE_CODE (arg1))
9430 && (truth_value_p (TREE_CODE (arg0))
9431 || (TREE_CODE (arg0) == BIT_AND_EXPR
9432 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9434 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9435 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9438 fold_convert_loc (loc, boolean_type_node, arg0),
9439 fold_convert_loc (loc, boolean_type_node, arg1));
9441 if (code == EQ_EXPR)
9442 tem = invert_truthvalue_loc (loc, tem);
9444 return fold_convert_loc (loc, type, tem);
9447 if (TREE_CODE_CLASS (code) == tcc_binary
9448 || TREE_CODE_CLASS (code) == tcc_comparison)
9450 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9452 tem = fold_build2_loc (loc, code, type,
9453 fold_convert_loc (loc, TREE_TYPE (op0),
9454 TREE_OPERAND (arg0, 1)), op1);
9455 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9458 if (TREE_CODE (arg1) == COMPOUND_EXPR
9459 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9461 tem = fold_build2_loc (loc, code, type, op0,
9462 fold_convert_loc (loc, TREE_TYPE (op1),
9463 TREE_OPERAND (arg1, 1)));
9464 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9468 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9470 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9472 /*cond_first_p=*/1);
9473 if (tem != NULL_TREE)
9477 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9479 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9481 /*cond_first_p=*/0);
9482 if (tem != NULL_TREE)
9490 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9491 if (TREE_CODE (arg0) == ADDR_EXPR
9492 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9494 tree iref = TREE_OPERAND (arg0, 0);
9495 return fold_build2 (MEM_REF, type,
9496 TREE_OPERAND (iref, 0),
9497 int_const_binop (PLUS_EXPR, arg1,
9498 TREE_OPERAND (iref, 1), 0));
9501 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9502 if (TREE_CODE (arg0) == ADDR_EXPR
9503 && handled_component_p (TREE_OPERAND (arg0, 0)))
9506 HOST_WIDE_INT coffset;
9507 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9511 return fold_build2 (MEM_REF, type,
9512 build_fold_addr_expr (base),
9513 int_const_binop (PLUS_EXPR, arg1,
9514 size_int (coffset), 0));
9519 case POINTER_PLUS_EXPR:
9520 /* 0 +p index -> (type)index */
9521 if (integer_zerop (arg0))
9522 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9524 /* PTR +p 0 -> PTR */
9525 if (integer_zerop (arg1))
9526 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9528 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9529 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9530 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9531 return fold_convert_loc (loc, type,
9532 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9533 fold_convert_loc (loc, sizetype,
9535 fold_convert_loc (loc, sizetype,
9538 /* index +p PTR -> PTR +p index */
9539 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9540 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9541 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9542 fold_convert_loc (loc, type, arg1),
9543 fold_convert_loc (loc, sizetype, arg0));
9545 /* (PTR +p B) +p A -> PTR +p (B + A) */
9546 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9549 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9550 tree arg00 = TREE_OPERAND (arg0, 0);
9551 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9552 arg01, fold_convert_loc (loc, sizetype, arg1));
9553 return fold_convert_loc (loc, type,
9554 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9559 /* PTR_CST +p CST -> CST1 */
9560 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9561 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9562 fold_convert_loc (loc, type, arg1));
9564 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9565 of the array. Loop optimizer sometimes produce this type of
9567 if (TREE_CODE (arg0) == ADDR_EXPR)
9569 tem = try_move_mult_to_index (loc, arg0,
9570 fold_convert_loc (loc, sizetype, arg1));
9572 return fold_convert_loc (loc, type, tem);
9578 /* A + (-B) -> A - B */
9579 if (TREE_CODE (arg1) == NEGATE_EXPR)
9580 return fold_build2_loc (loc, MINUS_EXPR, type,
9581 fold_convert_loc (loc, type, arg0),
9582 fold_convert_loc (loc, type,
9583 TREE_OPERAND (arg1, 0)));
9584 /* (-A) + B -> B - A */
9585 if (TREE_CODE (arg0) == NEGATE_EXPR
9586 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9587 return fold_build2_loc (loc, MINUS_EXPR, type,
9588 fold_convert_loc (loc, type, arg1),
9589 fold_convert_loc (loc, type,
9590 TREE_OPERAND (arg0, 0)));
9592 if (INTEGRAL_TYPE_P (type))
9594 /* Convert ~A + 1 to -A. */
9595 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9596 && integer_onep (arg1))
9597 return fold_build1_loc (loc, NEGATE_EXPR, type,
9598 fold_convert_loc (loc, type,
9599 TREE_OPERAND (arg0, 0)));
9602 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9603 && !TYPE_OVERFLOW_TRAPS (type))
9605 tree tem = TREE_OPERAND (arg0, 0);
9608 if (operand_equal_p (tem, arg1, 0))
9610 t1 = build_int_cst_type (type, -1);
9611 return omit_one_operand_loc (loc, type, t1, arg1);
9616 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9617 && !TYPE_OVERFLOW_TRAPS (type))
9619 tree tem = TREE_OPERAND (arg1, 0);
9622 if (operand_equal_p (arg0, tem, 0))
9624 t1 = build_int_cst_type (type, -1);
9625 return omit_one_operand_loc (loc, type, t1, arg0);
9629 /* X + (X / CST) * -CST is X % CST. */
9630 if (TREE_CODE (arg1) == MULT_EXPR
9631 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9632 && operand_equal_p (arg0,
9633 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9635 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9636 tree cst1 = TREE_OPERAND (arg1, 1);
9637 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9639 if (sum && integer_zerop (sum))
9640 return fold_convert_loc (loc, type,
9641 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9642 TREE_TYPE (arg0), arg0,
9647 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9648 same or one. Make sure type is not saturating.
9649 fold_plusminus_mult_expr will re-associate. */
9650 if ((TREE_CODE (arg0) == MULT_EXPR
9651 || TREE_CODE (arg1) == MULT_EXPR)
9652 && !TYPE_SATURATING (type)
9653 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9655 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9660 if (! FLOAT_TYPE_P (type))
9662 if (integer_zerop (arg1))
9663 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9665 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9666 with a constant, and the two constants have no bits in common,
9667 we should treat this as a BIT_IOR_EXPR since this may produce more
9669 if (TREE_CODE (arg0) == BIT_AND_EXPR
9670 && TREE_CODE (arg1) == BIT_AND_EXPR
9671 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9672 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9673 && integer_zerop (const_binop (BIT_AND_EXPR,
9674 TREE_OPERAND (arg0, 1),
9675 TREE_OPERAND (arg1, 1))))
9677 code = BIT_IOR_EXPR;
9681 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9682 (plus (plus (mult) (mult)) (foo)) so that we can
9683 take advantage of the factoring cases below. */
9684 if (((TREE_CODE (arg0) == PLUS_EXPR
9685 || TREE_CODE (arg0) == MINUS_EXPR)
9686 && TREE_CODE (arg1) == MULT_EXPR)
9687 || ((TREE_CODE (arg1) == PLUS_EXPR
9688 || TREE_CODE (arg1) == MINUS_EXPR)
9689 && TREE_CODE (arg0) == MULT_EXPR))
9691 tree parg0, parg1, parg, marg;
9692 enum tree_code pcode;
9694 if (TREE_CODE (arg1) == MULT_EXPR)
9695 parg = arg0, marg = arg1;
9697 parg = arg1, marg = arg0;
9698 pcode = TREE_CODE (parg);
9699 parg0 = TREE_OPERAND (parg, 0);
9700 parg1 = TREE_OPERAND (parg, 1);
9704 if (TREE_CODE (parg0) == MULT_EXPR
9705 && TREE_CODE (parg1) != MULT_EXPR)
9706 return fold_build2_loc (loc, pcode, type,
9707 fold_build2_loc (loc, PLUS_EXPR, type,
9708 fold_convert_loc (loc, type,
9710 fold_convert_loc (loc, type,
9712 fold_convert_loc (loc, type, parg1));
9713 if (TREE_CODE (parg0) != MULT_EXPR
9714 && TREE_CODE (parg1) == MULT_EXPR)
9716 fold_build2_loc (loc, PLUS_EXPR, type,
9717 fold_convert_loc (loc, type, parg0),
9718 fold_build2_loc (loc, pcode, type,
9719 fold_convert_loc (loc, type, marg),
9720 fold_convert_loc (loc, type,
9726 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9727 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9728 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9730 /* Likewise if the operands are reversed. */
9731 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9732 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9734 /* Convert X + -C into X - C. */
9735 if (TREE_CODE (arg1) == REAL_CST
9736 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9738 tem = fold_negate_const (arg1, type);
9739 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9740 return fold_build2_loc (loc, MINUS_EXPR, type,
9741 fold_convert_loc (loc, type, arg0),
9742 fold_convert_loc (loc, type, tem));
9745 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9746 to __complex__ ( x, y ). This is not the same for SNaNs or
9747 if signed zeros are involved. */
9748 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9749 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9750 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9752 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9753 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9754 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9755 bool arg0rz = false, arg0iz = false;
9756 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9757 || (arg0i && (arg0iz = real_zerop (arg0i))))
9759 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9760 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9761 if (arg0rz && arg1i && real_zerop (arg1i))
9763 tree rp = arg1r ? arg1r
9764 : build1 (REALPART_EXPR, rtype, arg1);
9765 tree ip = arg0i ? arg0i
9766 : build1 (IMAGPART_EXPR, rtype, arg0);
9767 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9769 else if (arg0iz && arg1r && real_zerop (arg1r))
9771 tree rp = arg0r ? arg0r
9772 : build1 (REALPART_EXPR, rtype, arg0);
9773 tree ip = arg1i ? arg1i
9774 : build1 (IMAGPART_EXPR, rtype, arg1);
9775 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9780 if (flag_unsafe_math_optimizations
9781 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9782 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9783 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9786 /* Convert x+x into x*2.0. */
9787 if (operand_equal_p (arg0, arg1, 0)
9788 && SCALAR_FLOAT_TYPE_P (type))
9789 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9790 build_real (type, dconst2));
9792 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9793 We associate floats only if the user has specified
9794 -fassociative-math. */
9795 if (flag_associative_math
9796 && TREE_CODE (arg1) == PLUS_EXPR
9797 && TREE_CODE (arg0) != MULT_EXPR)
9799 tree tree10 = TREE_OPERAND (arg1, 0);
9800 tree tree11 = TREE_OPERAND (arg1, 1);
9801 if (TREE_CODE (tree11) == MULT_EXPR
9802 && TREE_CODE (tree10) == MULT_EXPR)
9805 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9806 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9809 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9810 We associate floats only if the user has specified
9811 -fassociative-math. */
9812 if (flag_associative_math
9813 && TREE_CODE (arg0) == PLUS_EXPR
9814 && TREE_CODE (arg1) != MULT_EXPR)
9816 tree tree00 = TREE_OPERAND (arg0, 0);
9817 tree tree01 = TREE_OPERAND (arg0, 1);
9818 if (TREE_CODE (tree01) == MULT_EXPR
9819 && TREE_CODE (tree00) == MULT_EXPR)
9822 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9823 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9829 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9830 is a rotate of A by C1 bits. */
9831 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9832 is a rotate of A by B bits. */
9834 enum tree_code code0, code1;
9836 code0 = TREE_CODE (arg0);
9837 code1 = TREE_CODE (arg1);
9838 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9839 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9840 && operand_equal_p (TREE_OPERAND (arg0, 0),
9841 TREE_OPERAND (arg1, 0), 0)
9842 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9843 TYPE_UNSIGNED (rtype))
9844 /* Only create rotates in complete modes. Other cases are not
9845 expanded properly. */
9846 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9848 tree tree01, tree11;
9849 enum tree_code code01, code11;
9851 tree01 = TREE_OPERAND (arg0, 1);
9852 tree11 = TREE_OPERAND (arg1, 1);
9853 STRIP_NOPS (tree01);
9854 STRIP_NOPS (tree11);
9855 code01 = TREE_CODE (tree01);
9856 code11 = TREE_CODE (tree11);
9857 if (code01 == INTEGER_CST
9858 && code11 == INTEGER_CST
9859 && TREE_INT_CST_HIGH (tree01) == 0
9860 && TREE_INT_CST_HIGH (tree11) == 0
9861 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9862 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9864 tem = build2_loc (loc, LROTATE_EXPR,
9865 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9866 TREE_OPERAND (arg0, 0),
9867 code0 == LSHIFT_EXPR ? tree01 : tree11);
9868 return fold_convert_loc (loc, type, tem);
9870 else if (code11 == MINUS_EXPR)
9872 tree tree110, tree111;
9873 tree110 = TREE_OPERAND (tree11, 0);
9874 tree111 = TREE_OPERAND (tree11, 1);
9875 STRIP_NOPS (tree110);
9876 STRIP_NOPS (tree111);
9877 if (TREE_CODE (tree110) == INTEGER_CST
9878 && 0 == compare_tree_int (tree110,
9880 (TREE_TYPE (TREE_OPERAND
9882 && operand_equal_p (tree01, tree111, 0))
9884 fold_convert_loc (loc, type,
9885 build2 ((code0 == LSHIFT_EXPR
9888 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9889 TREE_OPERAND (arg0, 0), tree01));
9891 else if (code01 == MINUS_EXPR)
9893 tree tree010, tree011;
9894 tree010 = TREE_OPERAND (tree01, 0);
9895 tree011 = TREE_OPERAND (tree01, 1);
9896 STRIP_NOPS (tree010);
9897 STRIP_NOPS (tree011);
9898 if (TREE_CODE (tree010) == INTEGER_CST
9899 && 0 == compare_tree_int (tree010,
9901 (TREE_TYPE (TREE_OPERAND
9903 && operand_equal_p (tree11, tree011, 0))
9904 return fold_convert_loc
9906 build2 ((code0 != LSHIFT_EXPR
9909 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9910 TREE_OPERAND (arg0, 0), tree11));
9916 /* In most languages, can't associate operations on floats through
9917 parentheses. Rather than remember where the parentheses were, we
9918 don't associate floats at all, unless the user has specified
9920 And, we need to make sure type is not saturating. */
9922 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9923 && !TYPE_SATURATING (type))
9925 tree var0, con0, lit0, minus_lit0;
9926 tree var1, con1, lit1, minus_lit1;
9929 /* Split both trees into variables, constants, and literals. Then
9930 associate each group together, the constants with literals,
9931 then the result with variables. This increases the chances of
9932 literals being recombined later and of generating relocatable
9933 expressions for the sum of a constant and literal. */
9934 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9935 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9936 code == MINUS_EXPR);
9938 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9939 if (code == MINUS_EXPR)
9942 /* With undefined overflow we can only associate constants with one
9943 variable, and constants whose association doesn't overflow. */
9944 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9945 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9952 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9953 tmp0 = TREE_OPERAND (tmp0, 0);
9954 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9955 tmp1 = TREE_OPERAND (tmp1, 0);
9956 /* The only case we can still associate with two variables
9957 is if they are the same, modulo negation. */
9958 if (!operand_equal_p (tmp0, tmp1, 0))
9962 if (ok && lit0 && lit1)
9964 tree tmp0 = fold_convert (type, lit0);
9965 tree tmp1 = fold_convert (type, lit1);
9967 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
9968 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
9973 /* Only do something if we found more than two objects. Otherwise,
9974 nothing has changed and we risk infinite recursion. */
9976 && (2 < ((var0 != 0) + (var1 != 0)
9977 + (con0 != 0) + (con1 != 0)
9978 + (lit0 != 0) + (lit1 != 0)
9979 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9981 var0 = associate_trees (loc, var0, var1, code, type);
9982 con0 = associate_trees (loc, con0, con1, code, type);
9983 lit0 = associate_trees (loc, lit0, lit1, code, type);
9984 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
9986 /* Preserve the MINUS_EXPR if the negative part of the literal is
9987 greater than the positive part. Otherwise, the multiplicative
9988 folding code (i.e extract_muldiv) may be fooled in case
9989 unsigned constants are subtracted, like in the following
9990 example: ((X*2 + 4) - 8U)/2. */
9991 if (minus_lit0 && lit0)
9993 if (TREE_CODE (lit0) == INTEGER_CST
9994 && TREE_CODE (minus_lit0) == INTEGER_CST
9995 && tree_int_cst_lt (lit0, minus_lit0))
9997 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10003 lit0 = associate_trees (loc, lit0, minus_lit0,
10012 fold_convert_loc (loc, type,
10013 associate_trees (loc, var0, minus_lit0,
10014 MINUS_EXPR, type));
10017 con0 = associate_trees (loc, con0, minus_lit0,
10020 fold_convert_loc (loc, type,
10021 associate_trees (loc, var0, con0,
10026 con0 = associate_trees (loc, con0, lit0, code, type);
10028 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10036 /* Pointer simplifications for subtraction, simple reassociations. */
10037 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10039 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10040 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10041 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10043 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10044 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10045 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10046 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10047 return fold_build2_loc (loc, PLUS_EXPR, type,
10048 fold_build2_loc (loc, MINUS_EXPR, type,
10050 fold_build2_loc (loc, MINUS_EXPR, type,
10053 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10054 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10056 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10057 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10058 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10059 fold_convert_loc (loc, type, arg1));
10061 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10064 /* A - (-B) -> A + B */
10065 if (TREE_CODE (arg1) == NEGATE_EXPR)
10066 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10067 fold_convert_loc (loc, type,
10068 TREE_OPERAND (arg1, 0)));
10069 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10070 if (TREE_CODE (arg0) == NEGATE_EXPR
10071 && (FLOAT_TYPE_P (type)
10072 || INTEGRAL_TYPE_P (type))
10073 && negate_expr_p (arg1)
10074 && reorder_operands_p (arg0, arg1))
10075 return fold_build2_loc (loc, MINUS_EXPR, type,
10076 fold_convert_loc (loc, type,
10077 negate_expr (arg1)),
10078 fold_convert_loc (loc, type,
10079 TREE_OPERAND (arg0, 0)));
10080 /* Convert -A - 1 to ~A. */
10081 if (INTEGRAL_TYPE_P (type)
10082 && TREE_CODE (arg0) == NEGATE_EXPR
10083 && integer_onep (arg1)
10084 && !TYPE_OVERFLOW_TRAPS (type))
10085 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10086 fold_convert_loc (loc, type,
10087 TREE_OPERAND (arg0, 0)));
10089 /* Convert -1 - A to ~A. */
10090 if (INTEGRAL_TYPE_P (type)
10091 && integer_all_onesp (arg0))
10092 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10095 /* X - (X / CST) * CST is X % CST. */
10096 if (INTEGRAL_TYPE_P (type)
10097 && TREE_CODE (arg1) == MULT_EXPR
10098 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10099 && operand_equal_p (arg0,
10100 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10101 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10102 TREE_OPERAND (arg1, 1), 0))
10104 fold_convert_loc (loc, type,
10105 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10106 arg0, TREE_OPERAND (arg1, 1)));
10108 if (! FLOAT_TYPE_P (type))
10110 if (integer_zerop (arg0))
10111 return negate_expr (fold_convert_loc (loc, type, arg1));
10112 if (integer_zerop (arg1))
10113 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10115 /* Fold A - (A & B) into ~B & A. */
10116 if (!TREE_SIDE_EFFECTS (arg0)
10117 && TREE_CODE (arg1) == BIT_AND_EXPR)
10119 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10121 tree arg10 = fold_convert_loc (loc, type,
10122 TREE_OPERAND (arg1, 0));
10123 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10124 fold_build1_loc (loc, BIT_NOT_EXPR,
10126 fold_convert_loc (loc, type, arg0));
10128 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10130 tree arg11 = fold_convert_loc (loc,
10131 type, TREE_OPERAND (arg1, 1));
10132 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10133 fold_build1_loc (loc, BIT_NOT_EXPR,
10135 fold_convert_loc (loc, type, arg0));
10139 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10140 any power of 2 minus 1. */
10141 if (TREE_CODE (arg0) == BIT_AND_EXPR
10142 && TREE_CODE (arg1) == BIT_AND_EXPR
10143 && operand_equal_p (TREE_OPERAND (arg0, 0),
10144 TREE_OPERAND (arg1, 0), 0))
10146 tree mask0 = TREE_OPERAND (arg0, 1);
10147 tree mask1 = TREE_OPERAND (arg1, 1);
10148 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10150 if (operand_equal_p (tem, mask1, 0))
10152 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10153 TREE_OPERAND (arg0, 0), mask1);
10154 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10159 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10160 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10161 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10163 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10164 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10165 (-ARG1 + ARG0) reduces to -ARG1. */
10166 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10167 return negate_expr (fold_convert_loc (loc, type, arg1));
10169 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10170 __complex__ ( x, -y ). This is not the same for SNaNs or if
10171 signed zeros are involved. */
10172 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10173 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10174 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10176 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10177 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10178 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10179 bool arg0rz = false, arg0iz = false;
10180 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10181 || (arg0i && (arg0iz = real_zerop (arg0i))))
10183 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10184 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10185 if (arg0rz && arg1i && real_zerop (arg1i))
10187 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10189 : build1 (REALPART_EXPR, rtype, arg1));
10190 tree ip = arg0i ? arg0i
10191 : build1 (IMAGPART_EXPR, rtype, arg0);
10192 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10194 else if (arg0iz && arg1r && real_zerop (arg1r))
10196 tree rp = arg0r ? arg0r
10197 : build1 (REALPART_EXPR, rtype, arg0);
10198 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10200 : build1 (IMAGPART_EXPR, rtype, arg1));
10201 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10206 /* Fold &x - &x. This can happen from &x.foo - &x.
10207 This is unsafe for certain floats even in non-IEEE formats.
10208 In IEEE, it is unsafe because it does wrong for NaNs.
10209 Also note that operand_equal_p is always false if an operand
10212 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10213 && operand_equal_p (arg0, arg1, 0))
10214 return build_zero_cst (type);
10216 /* A - B -> A + (-B) if B is easily negatable. */
10217 if (negate_expr_p (arg1)
10218 && ((FLOAT_TYPE_P (type)
10219 /* Avoid this transformation if B is a positive REAL_CST. */
10220 && (TREE_CODE (arg1) != REAL_CST
10221 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10222 || INTEGRAL_TYPE_P (type)))
10223 return fold_build2_loc (loc, PLUS_EXPR, type,
10224 fold_convert_loc (loc, type, arg0),
10225 fold_convert_loc (loc, type,
10226 negate_expr (arg1)));
10228 /* Try folding difference of addresses. */
10230 HOST_WIDE_INT diff;
10232 if ((TREE_CODE (arg0) == ADDR_EXPR
10233 || TREE_CODE (arg1) == ADDR_EXPR)
10234 && ptr_difference_const (arg0, arg1, &diff))
10235 return build_int_cst_type (type, diff);
10238 /* Fold &a[i] - &a[j] to i-j. */
10239 if (TREE_CODE (arg0) == ADDR_EXPR
10240 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10241 && TREE_CODE (arg1) == ADDR_EXPR
10242 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10244 tree aref0 = TREE_OPERAND (arg0, 0);
10245 tree aref1 = TREE_OPERAND (arg1, 0);
10246 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10247 TREE_OPERAND (aref1, 0), 0))
10249 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10250 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10251 tree esz = array_ref_element_size (aref0);
10252 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10253 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10254 fold_convert_loc (loc, type, esz));
10259 if (FLOAT_TYPE_P (type)
10260 && flag_unsafe_math_optimizations
10261 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10262 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10263 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10266 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10267 same or one. Make sure type is not saturating.
10268 fold_plusminus_mult_expr will re-associate. */
10269 if ((TREE_CODE (arg0) == MULT_EXPR
10270 || TREE_CODE (arg1) == MULT_EXPR)
10271 && !TYPE_SATURATING (type)
10272 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10274 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10282 /* (-A) * (-B) -> A * B */
10283 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10284 return fold_build2_loc (loc, MULT_EXPR, type,
10285 fold_convert_loc (loc, type,
10286 TREE_OPERAND (arg0, 0)),
10287 fold_convert_loc (loc, type,
10288 negate_expr (arg1)));
10289 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10290 return fold_build2_loc (loc, MULT_EXPR, type,
10291 fold_convert_loc (loc, type,
10292 negate_expr (arg0)),
10293 fold_convert_loc (loc, type,
10294 TREE_OPERAND (arg1, 0)));
10296 if (! FLOAT_TYPE_P (type))
10298 if (integer_zerop (arg1))
10299 return omit_one_operand_loc (loc, type, arg1, arg0);
10300 if (integer_onep (arg1))
10301 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10302 /* Transform x * -1 into -x. Make sure to do the negation
10303 on the original operand with conversions not stripped
10304 because we can only strip non-sign-changing conversions. */
10305 if (integer_all_onesp (arg1))
10306 return fold_convert_loc (loc, type, negate_expr (op0));
10307 /* Transform x * -C into -x * C if x is easily negatable. */
10308 if (TREE_CODE (arg1) == INTEGER_CST
10309 && tree_int_cst_sgn (arg1) == -1
10310 && negate_expr_p (arg0)
10311 && (tem = negate_expr (arg1)) != arg1
10312 && !TREE_OVERFLOW (tem))
10313 return fold_build2_loc (loc, MULT_EXPR, type,
10314 fold_convert_loc (loc, type,
10315 negate_expr (arg0)),
10318 /* (a * (1 << b)) is (a << b) */
10319 if (TREE_CODE (arg1) == LSHIFT_EXPR
10320 && integer_onep (TREE_OPERAND (arg1, 0)))
10321 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10322 TREE_OPERAND (arg1, 1));
10323 if (TREE_CODE (arg0) == LSHIFT_EXPR
10324 && integer_onep (TREE_OPERAND (arg0, 0)))
10325 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10326 TREE_OPERAND (arg0, 1));
10328 /* (A + A) * C -> A * 2 * C */
10329 if (TREE_CODE (arg0) == PLUS_EXPR
10330 && TREE_CODE (arg1) == INTEGER_CST
10331 && operand_equal_p (TREE_OPERAND (arg0, 0),
10332 TREE_OPERAND (arg0, 1), 0))
10333 return fold_build2_loc (loc, MULT_EXPR, type,
10334 omit_one_operand_loc (loc, type,
10335 TREE_OPERAND (arg0, 0),
10336 TREE_OPERAND (arg0, 1)),
10337 fold_build2_loc (loc, MULT_EXPR, type,
10338 build_int_cst (type, 2) , arg1));
10340 strict_overflow_p = false;
10341 if (TREE_CODE (arg1) == INTEGER_CST
10342 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10343 &strict_overflow_p)))
10345 if (strict_overflow_p)
10346 fold_overflow_warning (("assuming signed overflow does not "
10347 "occur when simplifying "
10349 WARN_STRICT_OVERFLOW_MISC);
10350 return fold_convert_loc (loc, type, tem);
10353 /* Optimize z * conj(z) for integer complex numbers. */
10354 if (TREE_CODE (arg0) == CONJ_EXPR
10355 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10356 return fold_mult_zconjz (loc, type, arg1);
10357 if (TREE_CODE (arg1) == CONJ_EXPR
10358 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10359 return fold_mult_zconjz (loc, type, arg0);
10363 /* Maybe fold x * 0 to 0. The expressions aren't the same
10364 when x is NaN, since x * 0 is also NaN. Nor are they the
10365 same in modes with signed zeros, since multiplying a
10366 negative value by 0 gives -0, not +0. */
10367 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10368 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10369 && real_zerop (arg1))
10370 return omit_one_operand_loc (loc, type, arg1, arg0);
10371 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10372 Likewise for complex arithmetic with signed zeros. */
10373 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10374 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10375 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10376 && real_onep (arg1))
10377 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10379 /* Transform x * -1.0 into -x. */
10380 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10381 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10382 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10383 && real_minus_onep (arg1))
10384 return fold_convert_loc (loc, type, negate_expr (arg0));
10386 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10387 the result for floating point types due to rounding so it is applied
10388 only if -fassociative-math was specify. */
10389 if (flag_associative_math
10390 && TREE_CODE (arg0) == RDIV_EXPR
10391 && TREE_CODE (arg1) == REAL_CST
10392 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10394 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10397 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10398 TREE_OPERAND (arg0, 1));
10401 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10402 if (operand_equal_p (arg0, arg1, 0))
10404 tree tem = fold_strip_sign_ops (arg0);
10405 if (tem != NULL_TREE)
10407 tem = fold_convert_loc (loc, type, tem);
10408 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10412 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10413 This is not the same for NaNs or if signed zeros are
10415 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10416 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10417 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10418 && TREE_CODE (arg1) == COMPLEX_CST
10419 && real_zerop (TREE_REALPART (arg1)))
10421 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10422 if (real_onep (TREE_IMAGPART (arg1)))
10424 fold_build2_loc (loc, COMPLEX_EXPR, type,
10425 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10427 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10428 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10430 fold_build2_loc (loc, COMPLEX_EXPR, type,
10431 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10432 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10436 /* Optimize z * conj(z) for floating point complex numbers.
10437 Guarded by flag_unsafe_math_optimizations as non-finite
10438 imaginary components don't produce scalar results. */
10439 if (flag_unsafe_math_optimizations
10440 && TREE_CODE (arg0) == CONJ_EXPR
10441 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10442 return fold_mult_zconjz (loc, type, arg1);
10443 if (flag_unsafe_math_optimizations
10444 && TREE_CODE (arg1) == CONJ_EXPR
10445 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10446 return fold_mult_zconjz (loc, type, arg0);
10448 if (flag_unsafe_math_optimizations)
10450 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10451 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10453 /* Optimizations of root(...)*root(...). */
10454 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10457 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10458 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10460 /* Optimize sqrt(x)*sqrt(x) as x. */
10461 if (BUILTIN_SQRT_P (fcode0)
10462 && operand_equal_p (arg00, arg10, 0)
10463 && ! HONOR_SNANS (TYPE_MODE (type)))
10466 /* Optimize root(x)*root(y) as root(x*y). */
10467 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10468 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10469 return build_call_expr_loc (loc, rootfn, 1, arg);
10472 /* Optimize expN(x)*expN(y) as expN(x+y). */
10473 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10475 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10476 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10477 CALL_EXPR_ARG (arg0, 0),
10478 CALL_EXPR_ARG (arg1, 0));
10479 return build_call_expr_loc (loc, expfn, 1, arg);
10482 /* Optimizations of pow(...)*pow(...). */
10483 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10484 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10485 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10487 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10488 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10489 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10490 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10492 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10493 if (operand_equal_p (arg01, arg11, 0))
10495 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10496 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10498 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10501 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10502 if (operand_equal_p (arg00, arg10, 0))
10504 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10505 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10507 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10511 /* Optimize tan(x)*cos(x) as sin(x). */
10512 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10513 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10514 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10515 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10516 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10517 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10518 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10519 CALL_EXPR_ARG (arg1, 0), 0))
10521 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10523 if (sinfn != NULL_TREE)
10524 return build_call_expr_loc (loc, sinfn, 1,
10525 CALL_EXPR_ARG (arg0, 0));
10528 /* Optimize x*pow(x,c) as pow(x,c+1). */
10529 if (fcode1 == BUILT_IN_POW
10530 || fcode1 == BUILT_IN_POWF
10531 || fcode1 == BUILT_IN_POWL)
10533 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10534 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10535 if (TREE_CODE (arg11) == REAL_CST
10536 && !TREE_OVERFLOW (arg11)
10537 && operand_equal_p (arg0, arg10, 0))
10539 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10543 c = TREE_REAL_CST (arg11);
10544 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10545 arg = build_real (type, c);
10546 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10550 /* Optimize pow(x,c)*x as pow(x,c+1). */
10551 if (fcode0 == BUILT_IN_POW
10552 || fcode0 == BUILT_IN_POWF
10553 || fcode0 == BUILT_IN_POWL)
10555 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10556 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10557 if (TREE_CODE (arg01) == REAL_CST
10558 && !TREE_OVERFLOW (arg01)
10559 && operand_equal_p (arg1, arg00, 0))
10561 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10565 c = TREE_REAL_CST (arg01);
10566 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10567 arg = build_real (type, c);
10568 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10572 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10573 if (optimize_function_for_speed_p (cfun)
10574 && operand_equal_p (arg0, arg1, 0))
10576 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10580 tree arg = build_real (type, dconst2);
10581 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10590 if (integer_all_onesp (arg1))
10591 return omit_one_operand_loc (loc, type, arg1, arg0);
10592 if (integer_zerop (arg1))
10593 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10594 if (operand_equal_p (arg0, arg1, 0))
10595 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10597 /* ~X | X is -1. */
10598 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10599 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10601 t1 = build_zero_cst (type);
10602 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10603 return omit_one_operand_loc (loc, type, t1, arg1);
10606 /* X | ~X is -1. */
10607 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10608 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10610 t1 = build_zero_cst (type);
10611 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10612 return omit_one_operand_loc (loc, type, t1, arg0);
10615 /* Canonicalize (X & C1) | C2. */
10616 if (TREE_CODE (arg0) == BIT_AND_EXPR
10617 && TREE_CODE (arg1) == INTEGER_CST
10618 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10620 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10621 int width = TYPE_PRECISION (type), w;
10622 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10623 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10624 hi2 = TREE_INT_CST_HIGH (arg1);
10625 lo2 = TREE_INT_CST_LOW (arg1);
10627 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10628 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10629 return omit_one_operand_loc (loc, type, arg1,
10630 TREE_OPERAND (arg0, 0));
10632 if (width > HOST_BITS_PER_WIDE_INT)
10634 mhi = (unsigned HOST_WIDE_INT) -1
10635 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10641 mlo = (unsigned HOST_WIDE_INT) -1
10642 >> (HOST_BITS_PER_WIDE_INT - width);
10645 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10646 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10647 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10648 TREE_OPERAND (arg0, 0), arg1);
10650 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10651 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10652 mode which allows further optimizations. */
10659 for (w = BITS_PER_UNIT;
10660 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10663 unsigned HOST_WIDE_INT mask
10664 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10665 if (((lo1 | lo2) & mask) == mask
10666 && (lo1 & ~mask) == 0 && hi1 == 0)
10673 if (hi3 != hi1 || lo3 != lo1)
10674 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10675 fold_build2_loc (loc, BIT_AND_EXPR, type,
10676 TREE_OPERAND (arg0, 0),
10677 build_int_cst_wide (type,
10682 /* (X & Y) | Y is (X, Y). */
10683 if (TREE_CODE (arg0) == BIT_AND_EXPR
10684 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10685 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10686 /* (X & Y) | X is (Y, X). */
10687 if (TREE_CODE (arg0) == BIT_AND_EXPR
10688 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10689 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10690 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10691 /* X | (X & Y) is (Y, X). */
10692 if (TREE_CODE (arg1) == BIT_AND_EXPR
10693 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10694 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10695 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10696 /* X | (Y & X) is (Y, X). */
10697 if (TREE_CODE (arg1) == BIT_AND_EXPR
10698 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10699 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10700 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10702 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10703 if (t1 != NULL_TREE)
10706 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10708 This results in more efficient code for machines without a NAND
10709 instruction. Combine will canonicalize to the first form
10710 which will allow use of NAND instructions provided by the
10711 backend if they exist. */
10712 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10713 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10716 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10717 build2 (BIT_AND_EXPR, type,
10718 fold_convert_loc (loc, type,
10719 TREE_OPERAND (arg0, 0)),
10720 fold_convert_loc (loc, type,
10721 TREE_OPERAND (arg1, 0))));
10724 /* See if this can be simplified into a rotate first. If that
10725 is unsuccessful continue in the association code. */
10729 if (integer_zerop (arg1))
10730 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10731 if (integer_all_onesp (arg1))
10732 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10733 if (operand_equal_p (arg0, arg1, 0))
10734 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10736 /* ~X ^ X is -1. */
10737 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10738 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10740 t1 = build_zero_cst (type);
10741 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10742 return omit_one_operand_loc (loc, type, t1, arg1);
10745 /* X ^ ~X is -1. */
10746 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10747 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10749 t1 = build_zero_cst (type);
10750 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10751 return omit_one_operand_loc (loc, type, t1, arg0);
10754 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10755 with a constant, and the two constants have no bits in common,
10756 we should treat this as a BIT_IOR_EXPR since this may produce more
10757 simplifications. */
10758 if (TREE_CODE (arg0) == BIT_AND_EXPR
10759 && TREE_CODE (arg1) == BIT_AND_EXPR
10760 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10761 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10762 && integer_zerop (const_binop (BIT_AND_EXPR,
10763 TREE_OPERAND (arg0, 1),
10764 TREE_OPERAND (arg1, 1))))
10766 code = BIT_IOR_EXPR;
10770 /* (X | Y) ^ X -> Y & ~ X*/
10771 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10772 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10774 tree t2 = TREE_OPERAND (arg0, 1);
10775 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10777 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10778 fold_convert_loc (loc, type, t2),
10779 fold_convert_loc (loc, type, t1));
10783 /* (Y | X) ^ X -> Y & ~ X*/
10784 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10785 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10787 tree t2 = TREE_OPERAND (arg0, 0);
10788 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10790 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10791 fold_convert_loc (loc, type, t2),
10792 fold_convert_loc (loc, type, t1));
10796 /* X ^ (X | Y) -> Y & ~ X*/
10797 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10798 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10800 tree t2 = TREE_OPERAND (arg1, 1);
10801 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10803 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10804 fold_convert_loc (loc, type, t2),
10805 fold_convert_loc (loc, type, t1));
10809 /* X ^ (Y | X) -> Y & ~ X*/
10810 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10811 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10813 tree t2 = TREE_OPERAND (arg1, 0);
10814 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10816 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10817 fold_convert_loc (loc, type, t2),
10818 fold_convert_loc (loc, type, t1));
10822 /* Convert ~X ^ ~Y to X ^ Y. */
10823 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10824 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10825 return fold_build2_loc (loc, code, type,
10826 fold_convert_loc (loc, type,
10827 TREE_OPERAND (arg0, 0)),
10828 fold_convert_loc (loc, type,
10829 TREE_OPERAND (arg1, 0)));
10831 /* Convert ~X ^ C to X ^ ~C. */
10832 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10833 && TREE_CODE (arg1) == INTEGER_CST)
10834 return fold_build2_loc (loc, code, type,
10835 fold_convert_loc (loc, type,
10836 TREE_OPERAND (arg0, 0)),
10837 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10839 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10840 if (TREE_CODE (arg0) == BIT_AND_EXPR
10841 && integer_onep (TREE_OPERAND (arg0, 1))
10842 && integer_onep (arg1))
10843 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10844 build_int_cst (TREE_TYPE (arg0), 0));
10846 /* Fold (X & Y) ^ Y as ~X & Y. */
10847 if (TREE_CODE (arg0) == BIT_AND_EXPR
10848 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10850 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10851 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10852 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10853 fold_convert_loc (loc, type, arg1));
10855 /* Fold (X & Y) ^ X as ~Y & X. */
10856 if (TREE_CODE (arg0) == BIT_AND_EXPR
10857 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10858 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10860 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10861 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10862 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10863 fold_convert_loc (loc, type, arg1));
10865 /* Fold X ^ (X & Y) as X & ~Y. */
10866 if (TREE_CODE (arg1) == BIT_AND_EXPR
10867 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10869 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10870 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10871 fold_convert_loc (loc, type, arg0),
10872 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10874 /* Fold X ^ (Y & X) as ~Y & X. */
10875 if (TREE_CODE (arg1) == BIT_AND_EXPR
10876 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10877 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10879 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10880 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10881 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10882 fold_convert_loc (loc, type, arg0));
10885 /* See if this can be simplified into a rotate first. If that
10886 is unsuccessful continue in the association code. */
10890 if (integer_all_onesp (arg1))
10891 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10892 if (integer_zerop (arg1))
10893 return omit_one_operand_loc (loc, type, arg1, arg0);
10894 if (operand_equal_p (arg0, arg1, 0))
10895 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10897 /* ~X & X is always zero. */
10898 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10899 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10900 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10902 /* X & ~X is always zero. */
10903 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10904 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10905 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10907 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10908 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10909 && TREE_CODE (arg1) == INTEGER_CST
10910 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10912 tree tmp1 = fold_convert_loc (loc, type, arg1);
10913 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10914 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10915 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10916 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10918 fold_convert_loc (loc, type,
10919 fold_build2_loc (loc, BIT_IOR_EXPR,
10920 type, tmp2, tmp3));
10923 /* (X | Y) & Y is (X, Y). */
10924 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10925 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10926 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10927 /* (X | Y) & X is (Y, X). */
10928 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10929 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10930 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10931 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10932 /* X & (X | Y) is (Y, X). */
10933 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10934 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10935 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10936 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10937 /* X & (Y | X) is (Y, X). */
10938 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10939 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10940 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10941 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10943 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10944 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10945 && integer_onep (TREE_OPERAND (arg0, 1))
10946 && integer_onep (arg1))
10948 tem = TREE_OPERAND (arg0, 0);
10949 return fold_build2_loc (loc, EQ_EXPR, type,
10950 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10951 build_int_cst (TREE_TYPE (tem), 1)),
10952 build_int_cst (TREE_TYPE (tem), 0));
10954 /* Fold ~X & 1 as (X & 1) == 0. */
10955 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10956 && integer_onep (arg1))
10958 tem = TREE_OPERAND (arg0, 0);
10959 return fold_build2_loc (loc, EQ_EXPR, type,
10960 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10961 build_int_cst (TREE_TYPE (tem), 1)),
10962 build_int_cst (TREE_TYPE (tem), 0));
10965 /* Fold (X ^ Y) & Y as ~X & Y. */
10966 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10967 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10969 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10970 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10971 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10972 fold_convert_loc (loc, type, arg1));
10974 /* Fold (X ^ Y) & X as ~Y & X. */
10975 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10976 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10977 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10979 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10980 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10981 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10982 fold_convert_loc (loc, type, arg1));
10984 /* Fold X & (X ^ Y) as X & ~Y. */
10985 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10986 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10988 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10989 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10990 fold_convert_loc (loc, type, arg0),
10991 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10993 /* Fold X & (Y ^ X) as ~Y & X. */
10994 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10995 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10996 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10998 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10999 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11000 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11001 fold_convert_loc (loc, type, arg0));
11004 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11005 ((A & N) + B) & M -> (A + B) & M
11006 Similarly if (N & M) == 0,
11007 ((A | N) + B) & M -> (A + B) & M
11008 and for - instead of + (or unary - instead of +)
11009 and/or ^ instead of |.
11010 If B is constant and (B & M) == 0, fold into A & M. */
11011 if (host_integerp (arg1, 1))
11013 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11014 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11015 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11016 && (TREE_CODE (arg0) == PLUS_EXPR
11017 || TREE_CODE (arg0) == MINUS_EXPR
11018 || TREE_CODE (arg0) == NEGATE_EXPR)
11019 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11020 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11024 unsigned HOST_WIDE_INT cst0;
11026 /* Now we know that arg0 is (C + D) or (C - D) or
11027 -C and arg1 (M) is == (1LL << cst) - 1.
11028 Store C into PMOP[0] and D into PMOP[1]. */
11029 pmop[0] = TREE_OPERAND (arg0, 0);
11031 if (TREE_CODE (arg0) != NEGATE_EXPR)
11033 pmop[1] = TREE_OPERAND (arg0, 1);
11037 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11038 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11042 for (; which >= 0; which--)
11043 switch (TREE_CODE (pmop[which]))
11048 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11051 /* tree_low_cst not used, because we don't care about
11053 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11055 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11060 else if (cst0 != 0)
11062 /* If C or D is of the form (A & N) where
11063 (N & M) == M, or of the form (A | N) or
11064 (A ^ N) where (N & M) == 0, replace it with A. */
11065 pmop[which] = TREE_OPERAND (pmop[which], 0);
11068 /* If C or D is a N where (N & M) == 0, it can be
11069 omitted (assumed 0). */
11070 if ((TREE_CODE (arg0) == PLUS_EXPR
11071 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11072 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11073 pmop[which] = NULL;
11079 /* Only build anything new if we optimized one or both arguments
11081 if (pmop[0] != TREE_OPERAND (arg0, 0)
11082 || (TREE_CODE (arg0) != NEGATE_EXPR
11083 && pmop[1] != TREE_OPERAND (arg0, 1)))
11085 tree utype = TREE_TYPE (arg0);
11086 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11088 /* Perform the operations in a type that has defined
11089 overflow behavior. */
11090 utype = unsigned_type_for (TREE_TYPE (arg0));
11091 if (pmop[0] != NULL)
11092 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11093 if (pmop[1] != NULL)
11094 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11097 if (TREE_CODE (arg0) == NEGATE_EXPR)
11098 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11099 else if (TREE_CODE (arg0) == PLUS_EXPR)
11101 if (pmop[0] != NULL && pmop[1] != NULL)
11102 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11104 else if (pmop[0] != NULL)
11106 else if (pmop[1] != NULL)
11109 return build_int_cst (type, 0);
11111 else if (pmop[0] == NULL)
11112 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11114 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11116 /* TEM is now the new binary +, - or unary - replacement. */
11117 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11118 fold_convert_loc (loc, utype, arg1));
11119 return fold_convert_loc (loc, type, tem);
11124 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11125 if (t1 != NULL_TREE)
11127 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11128 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11129 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11132 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11134 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11135 && (~TREE_INT_CST_LOW (arg1)
11136 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11138 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11141 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11143 This results in more efficient code for machines without a NOR
11144 instruction. Combine will canonicalize to the first form
11145 which will allow use of NOR instructions provided by the
11146 backend if they exist. */
11147 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11148 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11150 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11151 build2 (BIT_IOR_EXPR, type,
11152 fold_convert_loc (loc, type,
11153 TREE_OPERAND (arg0, 0)),
11154 fold_convert_loc (loc, type,
11155 TREE_OPERAND (arg1, 0))));
11158 /* If arg0 is derived from the address of an object or function, we may
11159 be able to fold this expression using the object or function's
11161 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11163 unsigned HOST_WIDE_INT modulus, residue;
11164 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11166 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11167 integer_onep (arg1));
11169 /* This works because modulus is a power of 2. If this weren't the
11170 case, we'd have to replace it by its greatest power-of-2
11171 divisor: modulus & -modulus. */
11173 return build_int_cst (type, residue & low);
11176 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11177 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11178 if the new mask might be further optimized. */
11179 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11180 || TREE_CODE (arg0) == RSHIFT_EXPR)
11181 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11182 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11183 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11184 < TYPE_PRECISION (TREE_TYPE (arg0))
11185 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11186 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11188 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11189 unsigned HOST_WIDE_INT mask
11190 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11191 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11192 tree shift_type = TREE_TYPE (arg0);
11194 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11195 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11196 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11197 && TYPE_PRECISION (TREE_TYPE (arg0))
11198 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11200 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11201 tree arg00 = TREE_OPERAND (arg0, 0);
11202 /* See if more bits can be proven as zero because of
11204 if (TREE_CODE (arg00) == NOP_EXPR
11205 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11207 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11208 if (TYPE_PRECISION (inner_type)
11209 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11210 && TYPE_PRECISION (inner_type) < prec)
11212 prec = TYPE_PRECISION (inner_type);
11213 /* See if we can shorten the right shift. */
11215 shift_type = inner_type;
11218 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11219 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11220 zerobits <<= prec - shiftc;
11221 /* For arithmetic shift if sign bit could be set, zerobits
11222 can contain actually sign bits, so no transformation is
11223 possible, unless MASK masks them all away. In that
11224 case the shift needs to be converted into logical shift. */
11225 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11226 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11228 if ((mask & zerobits) == 0)
11229 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11235 /* ((X << 16) & 0xff00) is (X, 0). */
11236 if ((mask & zerobits) == mask)
11237 return omit_one_operand_loc (loc, type,
11238 build_int_cst (type, 0), arg0);
11240 newmask = mask | zerobits;
11241 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11245 /* Only do the transformation if NEWMASK is some integer
11247 for (prec = BITS_PER_UNIT;
11248 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11249 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11251 if (prec < HOST_BITS_PER_WIDE_INT
11252 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11256 if (shift_type != TREE_TYPE (arg0))
11258 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11259 fold_convert_loc (loc, shift_type,
11260 TREE_OPERAND (arg0, 0)),
11261 TREE_OPERAND (arg0, 1));
11262 tem = fold_convert_loc (loc, type, tem);
11266 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11267 if (!tree_int_cst_equal (newmaskt, arg1))
11268 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11276 /* Don't touch a floating-point divide by zero unless the mode
11277 of the constant can represent infinity. */
11278 if (TREE_CODE (arg1) == REAL_CST
11279 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11280 && real_zerop (arg1))
11283 /* Optimize A / A to 1.0 if we don't care about
11284 NaNs or Infinities. Skip the transformation
11285 for non-real operands. */
11286 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11287 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11288 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11289 && operand_equal_p (arg0, arg1, 0))
11291 tree r = build_real (TREE_TYPE (arg0), dconst1);
11293 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11296 /* The complex version of the above A / A optimization. */
11297 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11298 && operand_equal_p (arg0, arg1, 0))
11300 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11301 if (! HONOR_NANS (TYPE_MODE (elem_type))
11302 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11304 tree r = build_real (elem_type, dconst1);
11305 /* omit_two_operands will call fold_convert for us. */
11306 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11310 /* (-A) / (-B) -> A / B */
11311 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11312 return fold_build2_loc (loc, RDIV_EXPR, type,
11313 TREE_OPERAND (arg0, 0),
11314 negate_expr (arg1));
11315 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11316 return fold_build2_loc (loc, RDIV_EXPR, type,
11317 negate_expr (arg0),
11318 TREE_OPERAND (arg1, 0));
11320 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11321 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11322 && real_onep (arg1))
11323 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11325 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11326 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11327 && real_minus_onep (arg1))
11328 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11329 negate_expr (arg0)));
11331 /* If ARG1 is a constant, we can convert this to a multiply by the
11332 reciprocal. This does not have the same rounding properties,
11333 so only do this if -freciprocal-math. We can actually
11334 always safely do it if ARG1 is a power of two, but it's hard to
11335 tell if it is or not in a portable manner. */
11336 if (TREE_CODE (arg1) == REAL_CST)
11338 if (flag_reciprocal_math
11339 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11341 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11342 /* Find the reciprocal if optimizing and the result is exact. */
11346 r = TREE_REAL_CST (arg1);
11347 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11349 tem = build_real (type, r);
11350 return fold_build2_loc (loc, MULT_EXPR, type,
11351 fold_convert_loc (loc, type, arg0), tem);
11355 /* Convert A/B/C to A/(B*C). */
11356 if (flag_reciprocal_math
11357 && TREE_CODE (arg0) == RDIV_EXPR)
11358 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11359 fold_build2_loc (loc, MULT_EXPR, type,
11360 TREE_OPERAND (arg0, 1), arg1));
11362 /* Convert A/(B/C) to (A/B)*C. */
11363 if (flag_reciprocal_math
11364 && TREE_CODE (arg1) == RDIV_EXPR)
11365 return fold_build2_loc (loc, MULT_EXPR, type,
11366 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11367 TREE_OPERAND (arg1, 0)),
11368 TREE_OPERAND (arg1, 1));
11370 /* Convert C1/(X*C2) into (C1/C2)/X. */
11371 if (flag_reciprocal_math
11372 && TREE_CODE (arg1) == MULT_EXPR
11373 && TREE_CODE (arg0) == REAL_CST
11374 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11376 tree tem = const_binop (RDIV_EXPR, arg0,
11377 TREE_OPERAND (arg1, 1));
11379 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11380 TREE_OPERAND (arg1, 0));
11383 if (flag_unsafe_math_optimizations)
11385 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11386 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11388 /* Optimize sin(x)/cos(x) as tan(x). */
11389 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11390 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11391 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11392 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11393 CALL_EXPR_ARG (arg1, 0), 0))
11395 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11397 if (tanfn != NULL_TREE)
11398 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11401 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11402 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11403 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11404 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11405 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11406 CALL_EXPR_ARG (arg1, 0), 0))
11408 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11410 if (tanfn != NULL_TREE)
11412 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11413 CALL_EXPR_ARG (arg0, 0));
11414 return fold_build2_loc (loc, RDIV_EXPR, type,
11415 build_real (type, dconst1), tmp);
11419 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11420 NaNs or Infinities. */
11421 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11422 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11423 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11425 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11426 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11428 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11429 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11430 && operand_equal_p (arg00, arg01, 0))
11432 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11434 if (cosfn != NULL_TREE)
11435 return build_call_expr_loc (loc, cosfn, 1, arg00);
11439 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11440 NaNs or Infinities. */
11441 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11442 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11443 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11445 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11446 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11448 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11449 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11450 && operand_equal_p (arg00, arg01, 0))
11452 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11454 if (cosfn != NULL_TREE)
11456 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11457 return fold_build2_loc (loc, RDIV_EXPR, type,
11458 build_real (type, dconst1),
11464 /* Optimize pow(x,c)/x as pow(x,c-1). */
11465 if (fcode0 == BUILT_IN_POW
11466 || fcode0 == BUILT_IN_POWF
11467 || fcode0 == BUILT_IN_POWL)
11469 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11470 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11471 if (TREE_CODE (arg01) == REAL_CST
11472 && !TREE_OVERFLOW (arg01)
11473 && operand_equal_p (arg1, arg00, 0))
11475 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11479 c = TREE_REAL_CST (arg01);
11480 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11481 arg = build_real (type, c);
11482 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11486 /* Optimize a/root(b/c) into a*root(c/b). */
11487 if (BUILTIN_ROOT_P (fcode1))
11489 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11491 if (TREE_CODE (rootarg) == RDIV_EXPR)
11493 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11494 tree b = TREE_OPERAND (rootarg, 0);
11495 tree c = TREE_OPERAND (rootarg, 1);
11497 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11499 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11500 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11504 /* Optimize x/expN(y) into x*expN(-y). */
11505 if (BUILTIN_EXPONENT_P (fcode1))
11507 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11508 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11509 arg1 = build_call_expr_loc (loc,
11511 fold_convert_loc (loc, type, arg));
11512 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11515 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11516 if (fcode1 == BUILT_IN_POW
11517 || fcode1 == BUILT_IN_POWF
11518 || fcode1 == BUILT_IN_POWL)
11520 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11521 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11522 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11523 tree neg11 = fold_convert_loc (loc, type,
11524 negate_expr (arg11));
11525 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11526 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11531 case TRUNC_DIV_EXPR:
11532 /* Optimize (X & (-A)) / A where A is a power of 2,
11534 if (TREE_CODE (arg0) == BIT_AND_EXPR
11535 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11536 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11538 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11539 arg1, TREE_OPERAND (arg0, 1));
11540 if (sum && integer_zerop (sum)) {
11541 unsigned long pow2;
11543 if (TREE_INT_CST_LOW (arg1))
11544 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11546 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11547 + HOST_BITS_PER_WIDE_INT;
11549 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11550 TREE_OPERAND (arg0, 0),
11551 build_int_cst (NULL_TREE, pow2));
11557 case FLOOR_DIV_EXPR:
11558 /* Simplify A / (B << N) where A and B are positive and B is
11559 a power of 2, to A >> (N + log2(B)). */
11560 strict_overflow_p = false;
11561 if (TREE_CODE (arg1) == LSHIFT_EXPR
11562 && (TYPE_UNSIGNED (type)
11563 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11565 tree sval = TREE_OPERAND (arg1, 0);
11566 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11568 tree sh_cnt = TREE_OPERAND (arg1, 1);
11569 unsigned long pow2;
11571 if (TREE_INT_CST_LOW (sval))
11572 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11574 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11575 + HOST_BITS_PER_WIDE_INT;
11577 if (strict_overflow_p)
11578 fold_overflow_warning (("assuming signed overflow does not "
11579 "occur when simplifying A / (B << N)"),
11580 WARN_STRICT_OVERFLOW_MISC);
11582 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11583 sh_cnt, build_int_cst (NULL_TREE, pow2));
11584 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11585 fold_convert_loc (loc, type, arg0), sh_cnt);
11589 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11590 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11591 if (INTEGRAL_TYPE_P (type)
11592 && TYPE_UNSIGNED (type)
11593 && code == FLOOR_DIV_EXPR)
11594 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11598 case ROUND_DIV_EXPR:
11599 case CEIL_DIV_EXPR:
11600 case EXACT_DIV_EXPR:
11601 if (integer_onep (arg1))
11602 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11603 if (integer_zerop (arg1))
11605 /* X / -1 is -X. */
11606 if (!TYPE_UNSIGNED (type)
11607 && TREE_CODE (arg1) == INTEGER_CST
11608 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11609 && TREE_INT_CST_HIGH (arg1) == -1)
11610 return fold_convert_loc (loc, type, negate_expr (arg0));
11612 /* Convert -A / -B to A / B when the type is signed and overflow is
11614 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11615 && TREE_CODE (arg0) == NEGATE_EXPR
11616 && negate_expr_p (arg1))
11618 if (INTEGRAL_TYPE_P (type))
11619 fold_overflow_warning (("assuming signed overflow does not occur "
11620 "when distributing negation across "
11622 WARN_STRICT_OVERFLOW_MISC);
11623 return fold_build2_loc (loc, code, type,
11624 fold_convert_loc (loc, type,
11625 TREE_OPERAND (arg0, 0)),
11626 fold_convert_loc (loc, type,
11627 negate_expr (arg1)));
11629 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11630 && TREE_CODE (arg1) == NEGATE_EXPR
11631 && negate_expr_p (arg0))
11633 if (INTEGRAL_TYPE_P (type))
11634 fold_overflow_warning (("assuming signed overflow does not occur "
11635 "when distributing negation across "
11637 WARN_STRICT_OVERFLOW_MISC);
11638 return fold_build2_loc (loc, code, type,
11639 fold_convert_loc (loc, type,
11640 negate_expr (arg0)),
11641 fold_convert_loc (loc, type,
11642 TREE_OPERAND (arg1, 0)));
11645 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11646 operation, EXACT_DIV_EXPR.
11648 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11649 At one time others generated faster code, it's not clear if they do
11650 after the last round to changes to the DIV code in expmed.c. */
11651 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11652 && multiple_of_p (type, arg0, arg1))
11653 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11655 strict_overflow_p = false;
11656 if (TREE_CODE (arg1) == INTEGER_CST
11657 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11658 &strict_overflow_p)))
11660 if (strict_overflow_p)
11661 fold_overflow_warning (("assuming signed overflow does not occur "
11662 "when simplifying division"),
11663 WARN_STRICT_OVERFLOW_MISC);
11664 return fold_convert_loc (loc, type, tem);
11669 case CEIL_MOD_EXPR:
11670 case FLOOR_MOD_EXPR:
11671 case ROUND_MOD_EXPR:
11672 case TRUNC_MOD_EXPR:
11673 /* X % 1 is always zero, but be sure to preserve any side
11675 if (integer_onep (arg1))
11676 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11678 /* X % 0, return X % 0 unchanged so that we can get the
11679 proper warnings and errors. */
11680 if (integer_zerop (arg1))
11683 /* 0 % X is always zero, but be sure to preserve any side
11684 effects in X. Place this after checking for X == 0. */
11685 if (integer_zerop (arg0))
11686 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11688 /* X % -1 is zero. */
11689 if (!TYPE_UNSIGNED (type)
11690 && TREE_CODE (arg1) == INTEGER_CST
11691 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11692 && TREE_INT_CST_HIGH (arg1) == -1)
11693 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11695 /* X % -C is the same as X % C. */
11696 if (code == TRUNC_MOD_EXPR
11697 && !TYPE_UNSIGNED (type)
11698 && TREE_CODE (arg1) == INTEGER_CST
11699 && !TREE_OVERFLOW (arg1)
11700 && TREE_INT_CST_HIGH (arg1) < 0
11701 && !TYPE_OVERFLOW_TRAPS (type)
11702 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11703 && !sign_bit_p (arg1, arg1))
11704 return fold_build2_loc (loc, code, type,
11705 fold_convert_loc (loc, type, arg0),
11706 fold_convert_loc (loc, type,
11707 negate_expr (arg1)));
11709 /* X % -Y is the same as X % Y. */
11710 if (code == TRUNC_MOD_EXPR
11711 && !TYPE_UNSIGNED (type)
11712 && TREE_CODE (arg1) == NEGATE_EXPR
11713 && !TYPE_OVERFLOW_TRAPS (type))
11714 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11715 fold_convert_loc (loc, type,
11716 TREE_OPERAND (arg1, 0)));
11718 strict_overflow_p = false;
11719 if (TREE_CODE (arg1) == INTEGER_CST
11720 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11721 &strict_overflow_p)))
11723 if (strict_overflow_p)
11724 fold_overflow_warning (("assuming signed overflow does not occur "
11725 "when simplifying modulus"),
11726 WARN_STRICT_OVERFLOW_MISC);
11727 return fold_convert_loc (loc, type, tem);
11730 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11731 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11732 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11733 && (TYPE_UNSIGNED (type)
11734 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11737 /* Also optimize A % (C << N) where C is a power of 2,
11738 to A & ((C << N) - 1). */
11739 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11740 c = TREE_OPERAND (arg1, 0);
11742 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11745 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11746 build_int_cst (TREE_TYPE (arg1), 1));
11747 if (strict_overflow_p)
11748 fold_overflow_warning (("assuming signed overflow does not "
11749 "occur when simplifying "
11750 "X % (power of two)"),
11751 WARN_STRICT_OVERFLOW_MISC);
11752 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11753 fold_convert_loc (loc, type, arg0),
11754 fold_convert_loc (loc, type, mask));
11762 if (integer_all_onesp (arg0))
11763 return omit_one_operand_loc (loc, type, arg0, arg1);
11767 /* Optimize -1 >> x for arithmetic right shifts. */
11768 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11769 && tree_expr_nonnegative_p (arg1))
11770 return omit_one_operand_loc (loc, type, arg0, arg1);
11771 /* ... fall through ... */
11775 if (integer_zerop (arg1))
11776 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11777 if (integer_zerop (arg0))
11778 return omit_one_operand_loc (loc, type, arg0, arg1);
11780 /* Since negative shift count is not well-defined,
11781 don't try to compute it in the compiler. */
11782 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11785 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11786 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11787 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11788 && host_integerp (TREE_OPERAND (arg0, 1), false)
11789 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11791 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11792 + TREE_INT_CST_LOW (arg1));
11794 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11795 being well defined. */
11796 if (low >= TYPE_PRECISION (type))
11798 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11799 low = low % TYPE_PRECISION (type);
11800 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11801 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11802 TREE_OPERAND (arg0, 0));
11804 low = TYPE_PRECISION (type) - 1;
11807 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11808 build_int_cst (type, low));
11811 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11812 into x & ((unsigned)-1 >> c) for unsigned types. */
11813 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11814 || (TYPE_UNSIGNED (type)
11815 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11816 && host_integerp (arg1, false)
11817 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11818 && host_integerp (TREE_OPERAND (arg0, 1), false)
11819 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11821 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11822 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11828 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11830 lshift = build_int_cst (type, -1);
11831 lshift = int_const_binop (code, lshift, arg1, 0);
11833 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11837 /* Rewrite an LROTATE_EXPR by a constant into an
11838 RROTATE_EXPR by a new constant. */
11839 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11841 tree tem = build_int_cst (TREE_TYPE (arg1),
11842 TYPE_PRECISION (type));
11843 tem = const_binop (MINUS_EXPR, tem, arg1);
11844 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
11847 /* If we have a rotate of a bit operation with the rotate count and
11848 the second operand of the bit operation both constant,
11849 permute the two operations. */
11850 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11851 && (TREE_CODE (arg0) == BIT_AND_EXPR
11852 || TREE_CODE (arg0) == BIT_IOR_EXPR
11853 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11854 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11855 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11856 fold_build2_loc (loc, code, type,
11857 TREE_OPERAND (arg0, 0), arg1),
11858 fold_build2_loc (loc, code, type,
11859 TREE_OPERAND (arg0, 1), arg1));
11861 /* Two consecutive rotates adding up to the precision of the
11862 type can be ignored. */
11863 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11864 && TREE_CODE (arg0) == RROTATE_EXPR
11865 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11866 && TREE_INT_CST_HIGH (arg1) == 0
11867 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11868 && ((TREE_INT_CST_LOW (arg1)
11869 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11870 == (unsigned int) TYPE_PRECISION (type)))
11871 return TREE_OPERAND (arg0, 0);
11873 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11874 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11875 if the latter can be further optimized. */
11876 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11877 && TREE_CODE (arg0) == BIT_AND_EXPR
11878 && TREE_CODE (arg1) == INTEGER_CST
11879 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11881 tree mask = fold_build2_loc (loc, code, type,
11882 fold_convert_loc (loc, type,
11883 TREE_OPERAND (arg0, 1)),
11885 tree shift = fold_build2_loc (loc, code, type,
11886 fold_convert_loc (loc, type,
11887 TREE_OPERAND (arg0, 0)),
11889 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11897 if (operand_equal_p (arg0, arg1, 0))
11898 return omit_one_operand_loc (loc, type, arg0, arg1);
11899 if (INTEGRAL_TYPE_P (type)
11900 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11901 return omit_one_operand_loc (loc, type, arg1, arg0);
11902 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11908 if (operand_equal_p (arg0, arg1, 0))
11909 return omit_one_operand_loc (loc, type, arg0, arg1);
11910 if (INTEGRAL_TYPE_P (type)
11911 && TYPE_MAX_VALUE (type)
11912 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11913 return omit_one_operand_loc (loc, type, arg1, arg0);
11914 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11919 case TRUTH_ANDIF_EXPR:
11920 /* Note that the operands of this must be ints
11921 and their values must be 0 or 1.
11922 ("true" is a fixed value perhaps depending on the language.) */
11923 /* If first arg is constant zero, return it. */
11924 if (integer_zerop (arg0))
11925 return fold_convert_loc (loc, type, arg0);
11926 case TRUTH_AND_EXPR:
11927 /* If either arg is constant true, drop it. */
11928 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11929 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11930 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11931 /* Preserve sequence points. */
11932 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11933 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11934 /* If second arg is constant zero, result is zero, but first arg
11935 must be evaluated. */
11936 if (integer_zerop (arg1))
11937 return omit_one_operand_loc (loc, type, arg1, arg0);
11938 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11939 case will be handled here. */
11940 if (integer_zerop (arg0))
11941 return omit_one_operand_loc (loc, type, arg0, arg1);
11943 /* !X && X is always false. */
11944 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11945 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11946 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11947 /* X && !X is always false. */
11948 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11949 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11950 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11952 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11953 means A >= Y && A != MAX, but in this case we know that
11956 if (!TREE_SIDE_EFFECTS (arg0)
11957 && !TREE_SIDE_EFFECTS (arg1))
11959 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11960 if (tem && !operand_equal_p (tem, arg0, 0))
11961 return fold_build2_loc (loc, code, type, tem, arg1);
11963 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11964 if (tem && !operand_equal_p (tem, arg1, 0))
11965 return fold_build2_loc (loc, code, type, arg0, tem);
11969 /* We only do these simplifications if we are optimizing. */
11973 /* Check for things like (A || B) && (A || C). We can convert this
11974 to A || (B && C). Note that either operator can be any of the four
11975 truth and/or operations and the transformation will still be
11976 valid. Also note that we only care about order for the
11977 ANDIF and ORIF operators. If B contains side effects, this
11978 might change the truth-value of A. */
11979 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11980 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11981 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11982 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11983 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11984 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11986 tree a00 = TREE_OPERAND (arg0, 0);
11987 tree a01 = TREE_OPERAND (arg0, 1);
11988 tree a10 = TREE_OPERAND (arg1, 0);
11989 tree a11 = TREE_OPERAND (arg1, 1);
11990 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11991 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11992 && (code == TRUTH_AND_EXPR
11993 || code == TRUTH_OR_EXPR));
11995 if (operand_equal_p (a00, a10, 0))
11996 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11997 fold_build2_loc (loc, code, type, a01, a11));
11998 else if (commutative && operand_equal_p (a00, a11, 0))
11999 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12000 fold_build2_loc (loc, code, type, a01, a10));
12001 else if (commutative && operand_equal_p (a01, a10, 0))
12002 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
12003 fold_build2_loc (loc, code, type, a00, a11));
12005 /* This case if tricky because we must either have commutative
12006 operators or else A10 must not have side-effects. */
12008 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12009 && operand_equal_p (a01, a11, 0))
12010 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12011 fold_build2_loc (loc, code, type, a00, a10),
12015 /* See if we can build a range comparison. */
12016 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
12019 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
12020 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
12022 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
12024 return fold_build2_loc (loc, code, type, tem, arg1);
12027 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
12028 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
12030 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
12032 return fold_build2_loc (loc, code, type, arg0, tem);
12035 /* Check for the possibility of merging component references. If our
12036 lhs is another similar operation, try to merge its rhs with our
12037 rhs. Then try to merge our lhs and rhs. */
12038 if (TREE_CODE (arg0) == code
12039 && 0 != (tem = fold_truthop (loc, code, type,
12040 TREE_OPERAND (arg0, 1), arg1)))
12041 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12043 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12048 case TRUTH_ORIF_EXPR:
12049 /* Note that the operands of this must be ints
12050 and their values must be 0 or true.
12051 ("true" is a fixed value perhaps depending on the language.) */
12052 /* If first arg is constant true, return it. */
12053 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12054 return fold_convert_loc (loc, type, arg0);
12055 case TRUTH_OR_EXPR:
12056 /* If either arg is constant zero, drop it. */
12057 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12058 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12059 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12060 /* Preserve sequence points. */
12061 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12062 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12063 /* If second arg is constant true, result is true, but we must
12064 evaluate first arg. */
12065 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12066 return omit_one_operand_loc (loc, type, arg1, arg0);
12067 /* Likewise for first arg, but note this only occurs here for
12069 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12070 return omit_one_operand_loc (loc, type, arg0, arg1);
12072 /* !X || X is always true. */
12073 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12074 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12075 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12076 /* X || !X is always true. */
12077 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12078 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12079 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12083 case TRUTH_XOR_EXPR:
12084 /* If the second arg is constant zero, drop it. */
12085 if (integer_zerop (arg1))
12086 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12087 /* If the second arg is constant true, this is a logical inversion. */
12088 if (integer_onep (arg1))
12090 /* Only call invert_truthvalue if operand is a truth value. */
12091 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12092 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12094 tem = invert_truthvalue_loc (loc, arg0);
12095 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12097 /* Identical arguments cancel to zero. */
12098 if (operand_equal_p (arg0, arg1, 0))
12099 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12101 /* !X ^ X is always true. */
12102 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12103 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12104 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12106 /* X ^ !X is always true. */
12107 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12108 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12109 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12115 tem = fold_comparison (loc, code, type, op0, op1);
12116 if (tem != NULL_TREE)
12119 /* bool_var != 0 becomes bool_var. */
12120 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12121 && code == NE_EXPR)
12122 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12124 /* bool_var == 1 becomes bool_var. */
12125 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12126 && code == EQ_EXPR)
12127 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12129 /* bool_var != 1 becomes !bool_var. */
12130 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12131 && code == NE_EXPR)
12132 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12133 fold_convert_loc (loc, type, arg0));
12135 /* bool_var == 0 becomes !bool_var. */
12136 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12137 && code == EQ_EXPR)
12138 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12139 fold_convert_loc (loc, type, arg0));
12141 /* !exp != 0 becomes !exp */
12142 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12143 && code == NE_EXPR)
12144 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12146 /* If this is an equality comparison of the address of two non-weak,
12147 unaliased symbols neither of which are extern (since we do not
12148 have access to attributes for externs), then we know the result. */
12149 if (TREE_CODE (arg0) == ADDR_EXPR
12150 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12151 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12152 && ! lookup_attribute ("alias",
12153 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12154 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12155 && TREE_CODE (arg1) == ADDR_EXPR
12156 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12157 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12158 && ! lookup_attribute ("alias",
12159 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12160 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12162 /* We know that we're looking at the address of two
12163 non-weak, unaliased, static _DECL nodes.
12165 It is both wasteful and incorrect to call operand_equal_p
12166 to compare the two ADDR_EXPR nodes. It is wasteful in that
12167 all we need to do is test pointer equality for the arguments
12168 to the two ADDR_EXPR nodes. It is incorrect to use
12169 operand_equal_p as that function is NOT equivalent to a
12170 C equality test. It can in fact return false for two
12171 objects which would test as equal using the C equality
12173 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12174 return constant_boolean_node (equal
12175 ? code == EQ_EXPR : code != EQ_EXPR,
12179 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12180 a MINUS_EXPR of a constant, we can convert it into a comparison with
12181 a revised constant as long as no overflow occurs. */
12182 if (TREE_CODE (arg1) == INTEGER_CST
12183 && (TREE_CODE (arg0) == PLUS_EXPR
12184 || TREE_CODE (arg0) == MINUS_EXPR)
12185 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12186 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12187 ? MINUS_EXPR : PLUS_EXPR,
12188 fold_convert_loc (loc, TREE_TYPE (arg0),
12190 TREE_OPERAND (arg0, 1)))
12191 && !TREE_OVERFLOW (tem))
12192 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12194 /* Similarly for a NEGATE_EXPR. */
12195 if (TREE_CODE (arg0) == NEGATE_EXPR
12196 && TREE_CODE (arg1) == INTEGER_CST
12197 && 0 != (tem = negate_expr (arg1))
12198 && TREE_CODE (tem) == INTEGER_CST
12199 && !TREE_OVERFLOW (tem))
12200 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12202 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12203 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12204 && TREE_CODE (arg1) == INTEGER_CST
12205 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12206 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12207 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12208 fold_convert_loc (loc,
12211 TREE_OPERAND (arg0, 1)));
12213 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12214 if ((TREE_CODE (arg0) == PLUS_EXPR
12215 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12216 || TREE_CODE (arg0) == MINUS_EXPR)
12217 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12218 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12219 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12221 tree val = TREE_OPERAND (arg0, 1);
12222 return omit_two_operands_loc (loc, type,
12223 fold_build2_loc (loc, code, type,
12225 build_int_cst (TREE_TYPE (val),
12227 TREE_OPERAND (arg0, 0), arg1);
12230 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12231 if (TREE_CODE (arg0) == MINUS_EXPR
12232 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12233 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12234 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12236 return omit_two_operands_loc (loc, type,
12238 ? boolean_true_node : boolean_false_node,
12239 TREE_OPERAND (arg0, 1), arg1);
12242 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12243 for !=. Don't do this for ordered comparisons due to overflow. */
12244 if (TREE_CODE (arg0) == MINUS_EXPR
12245 && integer_zerop (arg1))
12246 return fold_build2_loc (loc, code, type,
12247 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12249 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12250 if (TREE_CODE (arg0) == ABS_EXPR
12251 && (integer_zerop (arg1) || real_zerop (arg1)))
12252 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12254 /* If this is an EQ or NE comparison with zero and ARG0 is
12255 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12256 two operations, but the latter can be done in one less insn
12257 on machines that have only two-operand insns or on which a
12258 constant cannot be the first operand. */
12259 if (TREE_CODE (arg0) == BIT_AND_EXPR
12260 && integer_zerop (arg1))
12262 tree arg00 = TREE_OPERAND (arg0, 0);
12263 tree arg01 = TREE_OPERAND (arg0, 1);
12264 if (TREE_CODE (arg00) == LSHIFT_EXPR
12265 && integer_onep (TREE_OPERAND (arg00, 0)))
12267 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12268 arg01, TREE_OPERAND (arg00, 1));
12269 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12270 build_int_cst (TREE_TYPE (arg0), 1));
12271 return fold_build2_loc (loc, code, type,
12272 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12275 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12276 && integer_onep (TREE_OPERAND (arg01, 0)))
12278 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12279 arg00, TREE_OPERAND (arg01, 1));
12280 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12281 build_int_cst (TREE_TYPE (arg0), 1));
12282 return fold_build2_loc (loc, code, type,
12283 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12288 /* If this is an NE or EQ comparison of zero against the result of a
12289 signed MOD operation whose second operand is a power of 2, make
12290 the MOD operation unsigned since it is simpler and equivalent. */
12291 if (integer_zerop (arg1)
12292 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12293 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12294 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12295 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12296 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12297 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12299 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12300 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12301 fold_convert_loc (loc, newtype,
12302 TREE_OPERAND (arg0, 0)),
12303 fold_convert_loc (loc, newtype,
12304 TREE_OPERAND (arg0, 1)));
12306 return fold_build2_loc (loc, code, type, newmod,
12307 fold_convert_loc (loc, newtype, arg1));
12310 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12311 C1 is a valid shift constant, and C2 is a power of two, i.e.
12313 if (TREE_CODE (arg0) == BIT_AND_EXPR
12314 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12315 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12317 && integer_pow2p (TREE_OPERAND (arg0, 1))
12318 && integer_zerop (arg1))
12320 tree itype = TREE_TYPE (arg0);
12321 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12322 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12324 /* Check for a valid shift count. */
12325 if (TREE_INT_CST_HIGH (arg001) == 0
12326 && TREE_INT_CST_LOW (arg001) < prec)
12328 tree arg01 = TREE_OPERAND (arg0, 1);
12329 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12330 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12331 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12332 can be rewritten as (X & (C2 << C1)) != 0. */
12333 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12335 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12336 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12337 return fold_build2_loc (loc, code, type, tem, arg1);
12339 /* Otherwise, for signed (arithmetic) shifts,
12340 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12341 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12342 else if (!TYPE_UNSIGNED (itype))
12343 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12344 arg000, build_int_cst (itype, 0));
12345 /* Otherwise, of unsigned (logical) shifts,
12346 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12347 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12349 return omit_one_operand_loc (loc, type,
12350 code == EQ_EXPR ? integer_one_node
12351 : integer_zero_node,
12356 /* If this is an NE comparison of zero with an AND of one, remove the
12357 comparison since the AND will give the correct value. */
12358 if (code == NE_EXPR
12359 && integer_zerop (arg1)
12360 && TREE_CODE (arg0) == BIT_AND_EXPR
12361 && integer_onep (TREE_OPERAND (arg0, 1)))
12362 return fold_convert_loc (loc, type, arg0);
12364 /* If we have (A & C) == C where C is a power of 2, convert this into
12365 (A & C) != 0. Similarly for NE_EXPR. */
12366 if (TREE_CODE (arg0) == BIT_AND_EXPR
12367 && integer_pow2p (TREE_OPERAND (arg0, 1))
12368 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12369 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12370 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12371 integer_zero_node));
12373 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12374 bit, then fold the expression into A < 0 or A >= 0. */
12375 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12379 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12380 Similarly for NE_EXPR. */
12381 if (TREE_CODE (arg0) == BIT_AND_EXPR
12382 && TREE_CODE (arg1) == INTEGER_CST
12383 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12385 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12386 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12387 TREE_OPERAND (arg0, 1));
12388 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12390 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12391 if (integer_nonzerop (dandnotc))
12392 return omit_one_operand_loc (loc, type, rslt, arg0);
12395 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12396 Similarly for NE_EXPR. */
12397 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12398 && TREE_CODE (arg1) == INTEGER_CST
12399 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12401 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12402 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12403 TREE_OPERAND (arg0, 1), notd);
12404 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12405 if (integer_nonzerop (candnotd))
12406 return omit_one_operand_loc (loc, type, rslt, arg0);
12409 /* If this is a comparison of a field, we may be able to simplify it. */
12410 if ((TREE_CODE (arg0) == COMPONENT_REF
12411 || TREE_CODE (arg0) == BIT_FIELD_REF)
12412 /* Handle the constant case even without -O
12413 to make sure the warnings are given. */
12414 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12416 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12421 /* Optimize comparisons of strlen vs zero to a compare of the
12422 first character of the string vs zero. To wit,
12423 strlen(ptr) == 0 => *ptr == 0
12424 strlen(ptr) != 0 => *ptr != 0
12425 Other cases should reduce to one of these two (or a constant)
12426 due to the return value of strlen being unsigned. */
12427 if (TREE_CODE (arg0) == CALL_EXPR
12428 && integer_zerop (arg1))
12430 tree fndecl = get_callee_fndecl (arg0);
12433 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12434 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12435 && call_expr_nargs (arg0) == 1
12436 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12438 tree iref = build_fold_indirect_ref_loc (loc,
12439 CALL_EXPR_ARG (arg0, 0));
12440 return fold_build2_loc (loc, code, type, iref,
12441 build_int_cst (TREE_TYPE (iref), 0));
12445 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12446 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12447 if (TREE_CODE (arg0) == RSHIFT_EXPR
12448 && integer_zerop (arg1)
12449 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12451 tree arg00 = TREE_OPERAND (arg0, 0);
12452 tree arg01 = TREE_OPERAND (arg0, 1);
12453 tree itype = TREE_TYPE (arg00);
12454 if (TREE_INT_CST_HIGH (arg01) == 0
12455 && TREE_INT_CST_LOW (arg01)
12456 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12458 if (TYPE_UNSIGNED (itype))
12460 itype = signed_type_for (itype);
12461 arg00 = fold_convert_loc (loc, itype, arg00);
12463 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12464 type, arg00, build_int_cst (itype, 0));
12468 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12469 if (integer_zerop (arg1)
12470 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12471 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12472 TREE_OPERAND (arg0, 1));
12474 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12475 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12476 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12477 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12478 build_int_cst (TREE_TYPE (arg1), 0));
12479 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12480 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12481 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12482 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12483 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12484 build_int_cst (TREE_TYPE (arg1), 0));
12486 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12487 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12488 && TREE_CODE (arg1) == INTEGER_CST
12489 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12490 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12491 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12492 TREE_OPERAND (arg0, 1), arg1));
12494 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12495 (X & C) == 0 when C is a single bit. */
12496 if (TREE_CODE (arg0) == BIT_AND_EXPR
12497 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12498 && integer_zerop (arg1)
12499 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12501 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12502 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12503 TREE_OPERAND (arg0, 1));
12504 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12508 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12509 constant C is a power of two, i.e. a single bit. */
12510 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12511 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12512 && integer_zerop (arg1)
12513 && integer_pow2p (TREE_OPERAND (arg0, 1))
12514 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12515 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12517 tree arg00 = TREE_OPERAND (arg0, 0);
12518 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12519 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12522 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12523 when is C is a power of two, i.e. a single bit. */
12524 if (TREE_CODE (arg0) == BIT_AND_EXPR
12525 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12526 && integer_zerop (arg1)
12527 && integer_pow2p (TREE_OPERAND (arg0, 1))
12528 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12529 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12531 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12532 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12533 arg000, TREE_OPERAND (arg0, 1));
12534 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12535 tem, build_int_cst (TREE_TYPE (tem), 0));
12538 if (integer_zerop (arg1)
12539 && tree_expr_nonzero_p (arg0))
12541 tree res = constant_boolean_node (code==NE_EXPR, type);
12542 return omit_one_operand_loc (loc, type, res, arg0);
12545 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12546 if (TREE_CODE (arg0) == NEGATE_EXPR
12547 && TREE_CODE (arg1) == NEGATE_EXPR)
12548 return fold_build2_loc (loc, code, type,
12549 TREE_OPERAND (arg0, 0),
12550 TREE_OPERAND (arg1, 0));
12552 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12553 if (TREE_CODE (arg0) == BIT_AND_EXPR
12554 && TREE_CODE (arg1) == BIT_AND_EXPR)
12556 tree arg00 = TREE_OPERAND (arg0, 0);
12557 tree arg01 = TREE_OPERAND (arg0, 1);
12558 tree arg10 = TREE_OPERAND (arg1, 0);
12559 tree arg11 = TREE_OPERAND (arg1, 1);
12560 tree itype = TREE_TYPE (arg0);
12562 if (operand_equal_p (arg01, arg11, 0))
12563 return fold_build2_loc (loc, code, type,
12564 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12565 fold_build2_loc (loc,
12566 BIT_XOR_EXPR, itype,
12569 build_int_cst (itype, 0));
12571 if (operand_equal_p (arg01, arg10, 0))
12572 return fold_build2_loc (loc, code, type,
12573 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12574 fold_build2_loc (loc,
12575 BIT_XOR_EXPR, itype,
12578 build_int_cst (itype, 0));
12580 if (operand_equal_p (arg00, arg11, 0))
12581 return fold_build2_loc (loc, code, type,
12582 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12583 fold_build2_loc (loc,
12584 BIT_XOR_EXPR, itype,
12587 build_int_cst (itype, 0));
12589 if (operand_equal_p (arg00, arg10, 0))
12590 return fold_build2_loc (loc, code, type,
12591 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12592 fold_build2_loc (loc,
12593 BIT_XOR_EXPR, itype,
12596 build_int_cst (itype, 0));
12599 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12600 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12602 tree arg00 = TREE_OPERAND (arg0, 0);
12603 tree arg01 = TREE_OPERAND (arg0, 1);
12604 tree arg10 = TREE_OPERAND (arg1, 0);
12605 tree arg11 = TREE_OPERAND (arg1, 1);
12606 tree itype = TREE_TYPE (arg0);
12608 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12609 operand_equal_p guarantees no side-effects so we don't need
12610 to use omit_one_operand on Z. */
12611 if (operand_equal_p (arg01, arg11, 0))
12612 return fold_build2_loc (loc, code, type, arg00, arg10);
12613 if (operand_equal_p (arg01, arg10, 0))
12614 return fold_build2_loc (loc, code, type, arg00, arg11);
12615 if (operand_equal_p (arg00, arg11, 0))
12616 return fold_build2_loc (loc, code, type, arg01, arg10);
12617 if (operand_equal_p (arg00, arg10, 0))
12618 return fold_build2_loc (loc, code, type, arg01, arg11);
12620 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12621 if (TREE_CODE (arg01) == INTEGER_CST
12622 && TREE_CODE (arg11) == INTEGER_CST)
12623 return fold_build2_loc (loc, code, type,
12624 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
12625 fold_build2_loc (loc,
12626 BIT_XOR_EXPR, itype,
12631 /* Attempt to simplify equality/inequality comparisons of complex
12632 values. Only lower the comparison if the result is known or
12633 can be simplified to a single scalar comparison. */
12634 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12635 || TREE_CODE (arg0) == COMPLEX_CST)
12636 && (TREE_CODE (arg1) == COMPLEX_EXPR
12637 || TREE_CODE (arg1) == COMPLEX_CST))
12639 tree real0, imag0, real1, imag1;
12642 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12644 real0 = TREE_OPERAND (arg0, 0);
12645 imag0 = TREE_OPERAND (arg0, 1);
12649 real0 = TREE_REALPART (arg0);
12650 imag0 = TREE_IMAGPART (arg0);
12653 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12655 real1 = TREE_OPERAND (arg1, 0);
12656 imag1 = TREE_OPERAND (arg1, 1);
12660 real1 = TREE_REALPART (arg1);
12661 imag1 = TREE_IMAGPART (arg1);
12664 rcond = fold_binary_loc (loc, code, type, real0, real1);
12665 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12667 if (integer_zerop (rcond))
12669 if (code == EQ_EXPR)
12670 return omit_two_operands_loc (loc, type, boolean_false_node,
12672 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12676 if (code == NE_EXPR)
12677 return omit_two_operands_loc (loc, type, boolean_true_node,
12679 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12683 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12684 if (icond && TREE_CODE (icond) == INTEGER_CST)
12686 if (integer_zerop (icond))
12688 if (code == EQ_EXPR)
12689 return omit_two_operands_loc (loc, type, boolean_false_node,
12691 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12695 if (code == NE_EXPR)
12696 return omit_two_operands_loc (loc, type, boolean_true_node,
12698 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12709 tem = fold_comparison (loc, code, type, op0, op1);
12710 if (tem != NULL_TREE)
12713 /* Transform comparisons of the form X +- C CMP X. */
12714 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12715 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12716 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12717 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12718 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12719 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12721 tree arg01 = TREE_OPERAND (arg0, 1);
12722 enum tree_code code0 = TREE_CODE (arg0);
12725 if (TREE_CODE (arg01) == REAL_CST)
12726 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12728 is_positive = tree_int_cst_sgn (arg01);
12730 /* (X - c) > X becomes false. */
12731 if (code == GT_EXPR
12732 && ((code0 == MINUS_EXPR && is_positive >= 0)
12733 || (code0 == PLUS_EXPR && is_positive <= 0)))
12735 if (TREE_CODE (arg01) == INTEGER_CST
12736 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12737 fold_overflow_warning (("assuming signed overflow does not "
12738 "occur when assuming that (X - c) > X "
12739 "is always false"),
12740 WARN_STRICT_OVERFLOW_ALL);
12741 return constant_boolean_node (0, type);
12744 /* Likewise (X + c) < X becomes false. */
12745 if (code == LT_EXPR
12746 && ((code0 == PLUS_EXPR && is_positive >= 0)
12747 || (code0 == MINUS_EXPR && is_positive <= 0)))
12749 if (TREE_CODE (arg01) == INTEGER_CST
12750 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12751 fold_overflow_warning (("assuming signed overflow does not "
12752 "occur when assuming that "
12753 "(X + c) < X is always false"),
12754 WARN_STRICT_OVERFLOW_ALL);
12755 return constant_boolean_node (0, type);
12758 /* Convert (X - c) <= X to true. */
12759 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12761 && ((code0 == MINUS_EXPR && is_positive >= 0)
12762 || (code0 == PLUS_EXPR && is_positive <= 0)))
12764 if (TREE_CODE (arg01) == INTEGER_CST
12765 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12766 fold_overflow_warning (("assuming signed overflow does not "
12767 "occur when assuming that "
12768 "(X - c) <= X is always true"),
12769 WARN_STRICT_OVERFLOW_ALL);
12770 return constant_boolean_node (1, type);
12773 /* Convert (X + c) >= X to true. */
12774 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12776 && ((code0 == PLUS_EXPR && is_positive >= 0)
12777 || (code0 == MINUS_EXPR && is_positive <= 0)))
12779 if (TREE_CODE (arg01) == INTEGER_CST
12780 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12781 fold_overflow_warning (("assuming signed overflow does not "
12782 "occur when assuming that "
12783 "(X + c) >= X is always true"),
12784 WARN_STRICT_OVERFLOW_ALL);
12785 return constant_boolean_node (1, type);
12788 if (TREE_CODE (arg01) == INTEGER_CST)
12790 /* Convert X + c > X and X - c < X to true for integers. */
12791 if (code == GT_EXPR
12792 && ((code0 == PLUS_EXPR && is_positive > 0)
12793 || (code0 == MINUS_EXPR && is_positive < 0)))
12795 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12796 fold_overflow_warning (("assuming signed overflow does "
12797 "not occur when assuming that "
12798 "(X + c) > X is always true"),
12799 WARN_STRICT_OVERFLOW_ALL);
12800 return constant_boolean_node (1, type);
12803 if (code == LT_EXPR
12804 && ((code0 == MINUS_EXPR && is_positive > 0)
12805 || (code0 == PLUS_EXPR && is_positive < 0)))
12807 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12808 fold_overflow_warning (("assuming signed overflow does "
12809 "not occur when assuming that "
12810 "(X - c) < X is always true"),
12811 WARN_STRICT_OVERFLOW_ALL);
12812 return constant_boolean_node (1, type);
12815 /* Convert X + c <= X and X - c >= X to false for integers. */
12816 if (code == LE_EXPR
12817 && ((code0 == PLUS_EXPR && is_positive > 0)
12818 || (code0 == MINUS_EXPR && is_positive < 0)))
12820 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12821 fold_overflow_warning (("assuming signed overflow does "
12822 "not occur when assuming that "
12823 "(X + c) <= X is always false"),
12824 WARN_STRICT_OVERFLOW_ALL);
12825 return constant_boolean_node (0, type);
12828 if (code == GE_EXPR
12829 && ((code0 == MINUS_EXPR && is_positive > 0)
12830 || (code0 == PLUS_EXPR && is_positive < 0)))
12832 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12833 fold_overflow_warning (("assuming signed overflow does "
12834 "not occur when assuming that "
12835 "(X - c) >= X is always false"),
12836 WARN_STRICT_OVERFLOW_ALL);
12837 return constant_boolean_node (0, type);
12842 /* Comparisons with the highest or lowest possible integer of
12843 the specified precision will have known values. */
12845 tree arg1_type = TREE_TYPE (arg1);
12846 unsigned int width = TYPE_PRECISION (arg1_type);
12848 if (TREE_CODE (arg1) == INTEGER_CST
12849 && width <= 2 * HOST_BITS_PER_WIDE_INT
12850 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12852 HOST_WIDE_INT signed_max_hi;
12853 unsigned HOST_WIDE_INT signed_max_lo;
12854 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12856 if (width <= HOST_BITS_PER_WIDE_INT)
12858 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12863 if (TYPE_UNSIGNED (arg1_type))
12865 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12871 max_lo = signed_max_lo;
12872 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12878 width -= HOST_BITS_PER_WIDE_INT;
12879 signed_max_lo = -1;
12880 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12885 if (TYPE_UNSIGNED (arg1_type))
12887 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12892 max_hi = signed_max_hi;
12893 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12897 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12898 && TREE_INT_CST_LOW (arg1) == max_lo)
12902 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12905 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12908 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12911 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12913 /* The GE_EXPR and LT_EXPR cases above are not normally
12914 reached because of previous transformations. */
12919 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12921 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12925 arg1 = const_binop (PLUS_EXPR, arg1,
12926 build_int_cst (TREE_TYPE (arg1), 1));
12927 return fold_build2_loc (loc, EQ_EXPR, type,
12928 fold_convert_loc (loc,
12929 TREE_TYPE (arg1), arg0),
12932 arg1 = const_binop (PLUS_EXPR, arg1,
12933 build_int_cst (TREE_TYPE (arg1), 1));
12934 return fold_build2_loc (loc, NE_EXPR, type,
12935 fold_convert_loc (loc, TREE_TYPE (arg1),
12941 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12943 && TREE_INT_CST_LOW (arg1) == min_lo)
12947 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12950 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12953 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12956 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12961 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12963 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12967 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12968 return fold_build2_loc (loc, NE_EXPR, type,
12969 fold_convert_loc (loc,
12970 TREE_TYPE (arg1), arg0),
12973 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12974 return fold_build2_loc (loc, EQ_EXPR, type,
12975 fold_convert_loc (loc, TREE_TYPE (arg1),
12982 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12983 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12984 && TYPE_UNSIGNED (arg1_type)
12985 /* We will flip the signedness of the comparison operator
12986 associated with the mode of arg1, so the sign bit is
12987 specified by this mode. Check that arg1 is the signed
12988 max associated with this sign bit. */
12989 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12990 /* signed_type does not work on pointer types. */
12991 && INTEGRAL_TYPE_P (arg1_type))
12993 /* The following case also applies to X < signed_max+1
12994 and X >= signed_max+1 because previous transformations. */
12995 if (code == LE_EXPR || code == GT_EXPR)
12998 st = signed_type_for (TREE_TYPE (arg1));
12999 return fold_build2_loc (loc,
13000 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13001 type, fold_convert_loc (loc, st, arg0),
13002 build_int_cst (st, 0));
13008 /* If we are comparing an ABS_EXPR with a constant, we can
13009 convert all the cases into explicit comparisons, but they may
13010 well not be faster than doing the ABS and one comparison.
13011 But ABS (X) <= C is a range comparison, which becomes a subtraction
13012 and a comparison, and is probably faster. */
13013 if (code == LE_EXPR
13014 && TREE_CODE (arg1) == INTEGER_CST
13015 && TREE_CODE (arg0) == ABS_EXPR
13016 && ! TREE_SIDE_EFFECTS (arg0)
13017 && (0 != (tem = negate_expr (arg1)))
13018 && TREE_CODE (tem) == INTEGER_CST
13019 && !TREE_OVERFLOW (tem))
13020 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13021 build2 (GE_EXPR, type,
13022 TREE_OPERAND (arg0, 0), tem),
13023 build2 (LE_EXPR, type,
13024 TREE_OPERAND (arg0, 0), arg1));
13026 /* Convert ABS_EXPR<x> >= 0 to true. */
13027 strict_overflow_p = false;
13028 if (code == GE_EXPR
13029 && (integer_zerop (arg1)
13030 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13031 && real_zerop (arg1)))
13032 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13034 if (strict_overflow_p)
13035 fold_overflow_warning (("assuming signed overflow does not occur "
13036 "when simplifying comparison of "
13037 "absolute value and zero"),
13038 WARN_STRICT_OVERFLOW_CONDITIONAL);
13039 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13042 /* Convert ABS_EXPR<x> < 0 to false. */
13043 strict_overflow_p = false;
13044 if (code == LT_EXPR
13045 && (integer_zerop (arg1) || real_zerop (arg1))
13046 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13048 if (strict_overflow_p)
13049 fold_overflow_warning (("assuming signed overflow does not occur "
13050 "when simplifying comparison of "
13051 "absolute value and zero"),
13052 WARN_STRICT_OVERFLOW_CONDITIONAL);
13053 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13056 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13057 and similarly for >= into !=. */
13058 if ((code == LT_EXPR || code == GE_EXPR)
13059 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13060 && TREE_CODE (arg1) == LSHIFT_EXPR
13061 && integer_onep (TREE_OPERAND (arg1, 0)))
13062 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13063 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13064 TREE_OPERAND (arg1, 1)),
13065 build_int_cst (TREE_TYPE (arg0), 0));
13067 if ((code == LT_EXPR || code == GE_EXPR)
13068 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13069 && CONVERT_EXPR_P (arg1)
13070 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13071 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13073 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13074 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13075 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13076 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13077 build_int_cst (TREE_TYPE (arg0), 0));
13082 case UNORDERED_EXPR:
13090 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13092 t1 = fold_relational_const (code, type, arg0, arg1);
13093 if (t1 != NULL_TREE)
13097 /* If the first operand is NaN, the result is constant. */
13098 if (TREE_CODE (arg0) == REAL_CST
13099 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13100 && (code != LTGT_EXPR || ! flag_trapping_math))
13102 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13103 ? integer_zero_node
13104 : integer_one_node;
13105 return omit_one_operand_loc (loc, type, t1, arg1);
13108 /* If the second operand is NaN, the result is constant. */
13109 if (TREE_CODE (arg1) == REAL_CST
13110 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13111 && (code != LTGT_EXPR || ! flag_trapping_math))
13113 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13114 ? integer_zero_node
13115 : integer_one_node;
13116 return omit_one_operand_loc (loc, type, t1, arg0);
13119 /* Simplify unordered comparison of something with itself. */
13120 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13121 && operand_equal_p (arg0, arg1, 0))
13122 return constant_boolean_node (1, type);
13124 if (code == LTGT_EXPR
13125 && !flag_trapping_math
13126 && operand_equal_p (arg0, arg1, 0))
13127 return constant_boolean_node (0, type);
13129 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13131 tree targ0 = strip_float_extensions (arg0);
13132 tree targ1 = strip_float_extensions (arg1);
13133 tree newtype = TREE_TYPE (targ0);
13135 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13136 newtype = TREE_TYPE (targ1);
13138 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13139 return fold_build2_loc (loc, code, type,
13140 fold_convert_loc (loc, newtype, targ0),
13141 fold_convert_loc (loc, newtype, targ1));
13146 case COMPOUND_EXPR:
13147 /* When pedantic, a compound expression can be neither an lvalue
13148 nor an integer constant expression. */
13149 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13151 /* Don't let (0, 0) be null pointer constant. */
13152 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13153 : fold_convert_loc (loc, type, arg1);
13154 return pedantic_non_lvalue_loc (loc, tem);
13157 if ((TREE_CODE (arg0) == REAL_CST
13158 && TREE_CODE (arg1) == REAL_CST)
13159 || (TREE_CODE (arg0) == INTEGER_CST
13160 && TREE_CODE (arg1) == INTEGER_CST))
13161 return build_complex (type, arg0, arg1);
13165 /* An ASSERT_EXPR should never be passed to fold_binary. */
13166 gcc_unreachable ();
13170 } /* switch (code) */
13173 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13174 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13178 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13180 switch (TREE_CODE (*tp))
13186 *walk_subtrees = 0;
13188 /* ... fall through ... */
13195 /* Return whether the sub-tree ST contains a label which is accessible from
13196 outside the sub-tree. */
13199 contains_label_p (tree st)
13202 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13205 /* Fold a ternary expression of code CODE and type TYPE with operands
13206 OP0, OP1, and OP2. Return the folded expression if folding is
13207 successful. Otherwise, return NULL_TREE. */
13210 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13211 tree op0, tree op1, tree op2)
13214 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13215 enum tree_code_class kind = TREE_CODE_CLASS (code);
13217 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13218 && TREE_CODE_LENGTH (code) == 3);
13220 /* Strip any conversions that don't change the mode. This is safe
13221 for every expression, except for a comparison expression because
13222 its signedness is derived from its operands. So, in the latter
13223 case, only strip conversions that don't change the signedness.
13225 Note that this is done as an internal manipulation within the
13226 constant folder, in order to find the simplest representation of
13227 the arguments so that their form can be studied. In any cases,
13228 the appropriate type conversions should be put back in the tree
13229 that will get out of the constant folder. */
13250 case COMPONENT_REF:
13251 if (TREE_CODE (arg0) == CONSTRUCTOR
13252 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13254 unsigned HOST_WIDE_INT idx;
13256 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13263 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13264 so all simple results must be passed through pedantic_non_lvalue. */
13265 if (TREE_CODE (arg0) == INTEGER_CST)
13267 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13268 tem = integer_zerop (arg0) ? op2 : op1;
13269 /* Only optimize constant conditions when the selected branch
13270 has the same type as the COND_EXPR. This avoids optimizing
13271 away "c ? x : throw", where the throw has a void type.
13272 Avoid throwing away that operand which contains label. */
13273 if ((!TREE_SIDE_EFFECTS (unused_op)
13274 || !contains_label_p (unused_op))
13275 && (! VOID_TYPE_P (TREE_TYPE (tem))
13276 || VOID_TYPE_P (type)))
13277 return pedantic_non_lvalue_loc (loc, tem);
13280 if (operand_equal_p (arg1, op2, 0))
13281 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13283 /* If we have A op B ? A : C, we may be able to convert this to a
13284 simpler expression, depending on the operation and the values
13285 of B and C. Signed zeros prevent all of these transformations,
13286 for reasons given above each one.
13288 Also try swapping the arguments and inverting the conditional. */
13289 if (COMPARISON_CLASS_P (arg0)
13290 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13291 arg1, TREE_OPERAND (arg0, 1))
13292 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13294 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13299 if (COMPARISON_CLASS_P (arg0)
13300 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13302 TREE_OPERAND (arg0, 1))
13303 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13305 tem = fold_truth_not_expr (loc, arg0);
13306 if (tem && COMPARISON_CLASS_P (tem))
13308 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13314 /* If the second operand is simpler than the third, swap them
13315 since that produces better jump optimization results. */
13316 if (truth_value_p (TREE_CODE (arg0))
13317 && tree_swap_operands_p (op1, op2, false))
13319 /* See if this can be inverted. If it can't, possibly because
13320 it was a floating-point inequality comparison, don't do
13322 tem = fold_truth_not_expr (loc, arg0);
13324 return fold_build3_loc (loc, code, type, tem, op2, op1);
13327 /* Convert A ? 1 : 0 to simply A. */
13328 if (integer_onep (op1)
13329 && integer_zerop (op2)
13330 /* If we try to convert OP0 to our type, the
13331 call to fold will try to move the conversion inside
13332 a COND, which will recurse. In that case, the COND_EXPR
13333 is probably the best choice, so leave it alone. */
13334 && type == TREE_TYPE (arg0))
13335 return pedantic_non_lvalue_loc (loc, arg0);
13337 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13338 over COND_EXPR in cases such as floating point comparisons. */
13339 if (integer_zerop (op1)
13340 && integer_onep (op2)
13341 && truth_value_p (TREE_CODE (arg0)))
13342 return pedantic_non_lvalue_loc (loc,
13343 fold_convert_loc (loc, type,
13344 invert_truthvalue_loc (loc,
13347 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13348 if (TREE_CODE (arg0) == LT_EXPR
13349 && integer_zerop (TREE_OPERAND (arg0, 1))
13350 && integer_zerop (op2)
13351 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13353 /* sign_bit_p only checks ARG1 bits within A's precision.
13354 If <sign bit of A> has wider type than A, bits outside
13355 of A's precision in <sign bit of A> need to be checked.
13356 If they are all 0, this optimization needs to be done
13357 in unsigned A's type, if they are all 1 in signed A's type,
13358 otherwise this can't be done. */
13359 if (TYPE_PRECISION (TREE_TYPE (tem))
13360 < TYPE_PRECISION (TREE_TYPE (arg1))
13361 && TYPE_PRECISION (TREE_TYPE (tem))
13362 < TYPE_PRECISION (type))
13364 unsigned HOST_WIDE_INT mask_lo;
13365 HOST_WIDE_INT mask_hi;
13366 int inner_width, outer_width;
13369 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13370 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13371 if (outer_width > TYPE_PRECISION (type))
13372 outer_width = TYPE_PRECISION (type);
13374 if (outer_width > HOST_BITS_PER_WIDE_INT)
13376 mask_hi = ((unsigned HOST_WIDE_INT) -1
13377 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13383 mask_lo = ((unsigned HOST_WIDE_INT) -1
13384 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13386 if (inner_width > HOST_BITS_PER_WIDE_INT)
13388 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13389 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13393 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13394 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13396 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13397 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13399 tem_type = signed_type_for (TREE_TYPE (tem));
13400 tem = fold_convert_loc (loc, tem_type, tem);
13402 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13403 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13405 tem_type = unsigned_type_for (TREE_TYPE (tem));
13406 tem = fold_convert_loc (loc, tem_type, tem);
13414 fold_convert_loc (loc, type,
13415 fold_build2_loc (loc, BIT_AND_EXPR,
13416 TREE_TYPE (tem), tem,
13417 fold_convert_loc (loc,
13422 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13423 already handled above. */
13424 if (TREE_CODE (arg0) == BIT_AND_EXPR
13425 && integer_onep (TREE_OPERAND (arg0, 1))
13426 && integer_zerop (op2)
13427 && integer_pow2p (arg1))
13429 tree tem = TREE_OPERAND (arg0, 0);
13431 if (TREE_CODE (tem) == RSHIFT_EXPR
13432 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13433 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13434 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13435 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13436 TREE_OPERAND (tem, 0), arg1);
13439 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13440 is probably obsolete because the first operand should be a
13441 truth value (that's why we have the two cases above), but let's
13442 leave it in until we can confirm this for all front-ends. */
13443 if (integer_zerop (op2)
13444 && TREE_CODE (arg0) == NE_EXPR
13445 && integer_zerop (TREE_OPERAND (arg0, 1))
13446 && integer_pow2p (arg1)
13447 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13448 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13449 arg1, OEP_ONLY_CONST))
13450 return pedantic_non_lvalue_loc (loc,
13451 fold_convert_loc (loc, type,
13452 TREE_OPERAND (arg0, 0)));
13454 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13455 if (integer_zerop (op2)
13456 && truth_value_p (TREE_CODE (arg0))
13457 && truth_value_p (TREE_CODE (arg1)))
13458 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13459 fold_convert_loc (loc, type, arg0),
13462 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13463 if (integer_onep (op2)
13464 && truth_value_p (TREE_CODE (arg0))
13465 && truth_value_p (TREE_CODE (arg1)))
13467 /* Only perform transformation if ARG0 is easily inverted. */
13468 tem = fold_truth_not_expr (loc, arg0);
13470 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13471 fold_convert_loc (loc, type, tem),
13475 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13476 if (integer_zerop (arg1)
13477 && truth_value_p (TREE_CODE (arg0))
13478 && truth_value_p (TREE_CODE (op2)))
13480 /* Only perform transformation if ARG0 is easily inverted. */
13481 tem = fold_truth_not_expr (loc, arg0);
13483 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13484 fold_convert_loc (loc, type, tem),
13488 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13489 if (integer_onep (arg1)
13490 && truth_value_p (TREE_CODE (arg0))
13491 && truth_value_p (TREE_CODE (op2)))
13492 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13493 fold_convert_loc (loc, type, arg0),
13499 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13500 of fold_ternary on them. */
13501 gcc_unreachable ();
13503 case BIT_FIELD_REF:
13504 if ((TREE_CODE (arg0) == VECTOR_CST
13505 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13506 && type == TREE_TYPE (TREE_TYPE (arg0)))
13508 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13509 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13512 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13513 && (idx % width) == 0
13514 && (idx = idx / width)
13515 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13517 tree elements = NULL_TREE;
13519 if (TREE_CODE (arg0) == VECTOR_CST)
13520 elements = TREE_VECTOR_CST_ELTS (arg0);
13523 unsigned HOST_WIDE_INT idx;
13526 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13527 elements = tree_cons (NULL_TREE, value, elements);
13529 while (idx-- > 0 && elements)
13530 elements = TREE_CHAIN (elements);
13532 return TREE_VALUE (elements);
13534 return build_zero_cst (type);
13538 /* A bit-field-ref that referenced the full argument can be stripped. */
13539 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13540 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13541 && integer_zerop (op2))
13542 return fold_convert_loc (loc, type, arg0);
13547 /* For integers we can decompose the FMA if possible. */
13548 if (TREE_CODE (arg0) == INTEGER_CST
13549 && TREE_CODE (arg1) == INTEGER_CST)
13550 return fold_build2_loc (loc, PLUS_EXPR, type,
13551 const_binop (MULT_EXPR, arg0, arg1), arg2);
13552 if (integer_zerop (arg2))
13553 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13555 return fold_fma (loc, type, arg0, arg1, arg2);
13559 } /* switch (code) */
13562 /* Perform constant folding and related simplification of EXPR.
13563 The related simplifications include x*1 => x, x*0 => 0, etc.,
13564 and application of the associative law.
13565 NOP_EXPR conversions may be removed freely (as long as we
13566 are careful not to change the type of the overall expression).
13567 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13568 but we can constant-fold them if they have constant operands. */
13570 #ifdef ENABLE_FOLD_CHECKING
13571 # define fold(x) fold_1 (x)
13572 static tree fold_1 (tree);
13578 const tree t = expr;
13579 enum tree_code code = TREE_CODE (t);
13580 enum tree_code_class kind = TREE_CODE_CLASS (code);
13582 location_t loc = EXPR_LOCATION (expr);
13584 /* Return right away if a constant. */
13585 if (kind == tcc_constant)
13588 /* CALL_EXPR-like objects with variable numbers of operands are
13589 treated specially. */
13590 if (kind == tcc_vl_exp)
13592 if (code == CALL_EXPR)
13594 tem = fold_call_expr (loc, expr, false);
13595 return tem ? tem : expr;
13600 if (IS_EXPR_CODE_CLASS (kind))
13602 tree type = TREE_TYPE (t);
13603 tree op0, op1, op2;
13605 switch (TREE_CODE_LENGTH (code))
13608 op0 = TREE_OPERAND (t, 0);
13609 tem = fold_unary_loc (loc, code, type, op0);
13610 return tem ? tem : expr;
13612 op0 = TREE_OPERAND (t, 0);
13613 op1 = TREE_OPERAND (t, 1);
13614 tem = fold_binary_loc (loc, code, type, op0, op1);
13615 return tem ? tem : expr;
13617 op0 = TREE_OPERAND (t, 0);
13618 op1 = TREE_OPERAND (t, 1);
13619 op2 = TREE_OPERAND (t, 2);
13620 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13621 return tem ? tem : expr;
13631 tree op0 = TREE_OPERAND (t, 0);
13632 tree op1 = TREE_OPERAND (t, 1);
13634 if (TREE_CODE (op1) == INTEGER_CST
13635 && TREE_CODE (op0) == CONSTRUCTOR
13636 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13638 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13639 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13640 unsigned HOST_WIDE_INT begin = 0;
13642 /* Find a matching index by means of a binary search. */
13643 while (begin != end)
13645 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13646 tree index = VEC_index (constructor_elt, elts, middle)->index;
13648 if (TREE_CODE (index) == INTEGER_CST
13649 && tree_int_cst_lt (index, op1))
13650 begin = middle + 1;
13651 else if (TREE_CODE (index) == INTEGER_CST
13652 && tree_int_cst_lt (op1, index))
13654 else if (TREE_CODE (index) == RANGE_EXPR
13655 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13656 begin = middle + 1;
13657 else if (TREE_CODE (index) == RANGE_EXPR
13658 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13661 return VEC_index (constructor_elt, elts, middle)->value;
13669 return fold (DECL_INITIAL (t));
13673 } /* switch (code) */
13676 #ifdef ENABLE_FOLD_CHECKING
13679 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13680 static void fold_check_failed (const_tree, const_tree);
13681 void print_fold_checksum (const_tree);
13683 /* When --enable-checking=fold, compute a digest of expr before
13684 and after actual fold call to see if fold did not accidentally
13685 change original expr. */
13691 struct md5_ctx ctx;
13692 unsigned char checksum_before[16], checksum_after[16];
13695 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13696 md5_init_ctx (&ctx);
13697 fold_checksum_tree (expr, &ctx, ht);
13698 md5_finish_ctx (&ctx, checksum_before);
13701 ret = fold_1 (expr);
13703 md5_init_ctx (&ctx);
13704 fold_checksum_tree (expr, &ctx, ht);
13705 md5_finish_ctx (&ctx, checksum_after);
13708 if (memcmp (checksum_before, checksum_after, 16))
13709 fold_check_failed (expr, ret);
13715 print_fold_checksum (const_tree expr)
13717 struct md5_ctx ctx;
13718 unsigned char checksum[16], cnt;
13721 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13722 md5_init_ctx (&ctx);
13723 fold_checksum_tree (expr, &ctx, ht);
13724 md5_finish_ctx (&ctx, checksum);
13726 for (cnt = 0; cnt < 16; ++cnt)
13727 fprintf (stderr, "%02x", checksum[cnt]);
13728 putc ('\n', stderr);
13732 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13734 internal_error ("fold check: original tree changed by fold");
13738 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13741 enum tree_code code;
13742 union tree_node buf;
13747 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13748 <= sizeof (struct tree_function_decl))
13749 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13752 slot = (void **) htab_find_slot (ht, expr, INSERT);
13755 *slot = CONST_CAST_TREE (expr);
13756 code = TREE_CODE (expr);
13757 if (TREE_CODE_CLASS (code) == tcc_declaration
13758 && DECL_ASSEMBLER_NAME_SET_P (expr))
13760 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13761 memcpy ((char *) &buf, expr, tree_size (expr));
13762 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13763 expr = (tree) &buf;
13765 else if (TREE_CODE_CLASS (code) == tcc_type
13766 && (TYPE_POINTER_TO (expr)
13767 || TYPE_REFERENCE_TO (expr)
13768 || TYPE_CACHED_VALUES_P (expr)
13769 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13770 || TYPE_NEXT_VARIANT (expr)))
13772 /* Allow these fields to be modified. */
13774 memcpy ((char *) &buf, expr, tree_size (expr));
13775 expr = tmp = (tree) &buf;
13776 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13777 TYPE_POINTER_TO (tmp) = NULL;
13778 TYPE_REFERENCE_TO (tmp) = NULL;
13779 TYPE_NEXT_VARIANT (tmp) = NULL;
13780 if (TYPE_CACHED_VALUES_P (tmp))
13782 TYPE_CACHED_VALUES_P (tmp) = 0;
13783 TYPE_CACHED_VALUES (tmp) = NULL;
13786 md5_process_bytes (expr, tree_size (expr), ctx);
13787 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13788 if (TREE_CODE_CLASS (code) != tcc_type
13789 && TREE_CODE_CLASS (code) != tcc_declaration
13790 && code != TREE_LIST
13791 && code != SSA_NAME)
13792 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13793 switch (TREE_CODE_CLASS (code))
13799 md5_process_bytes (TREE_STRING_POINTER (expr),
13800 TREE_STRING_LENGTH (expr), ctx);
13803 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13804 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13807 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13813 case tcc_exceptional:
13817 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13818 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13819 expr = TREE_CHAIN (expr);
13820 goto recursive_label;
13823 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13824 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13830 case tcc_expression:
13831 case tcc_reference:
13832 case tcc_comparison:
13835 case tcc_statement:
13837 len = TREE_OPERAND_LENGTH (expr);
13838 for (i = 0; i < len; ++i)
13839 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13841 case tcc_declaration:
13842 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13843 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13844 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13846 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13847 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13848 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13849 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13850 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13852 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13853 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13855 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13857 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13858 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13859 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13863 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13864 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13865 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13866 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13867 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13868 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13869 if (INTEGRAL_TYPE_P (expr)
13870 || SCALAR_FLOAT_TYPE_P (expr))
13872 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13873 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13875 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13876 if (TREE_CODE (expr) == RECORD_TYPE
13877 || TREE_CODE (expr) == UNION_TYPE
13878 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13879 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13880 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13887 /* Helper function for outputting the checksum of a tree T. When
13888 debugging with gdb, you can "define mynext" to be "next" followed
13889 by "call debug_fold_checksum (op0)", then just trace down till the
13892 DEBUG_FUNCTION void
13893 debug_fold_checksum (const_tree t)
13896 unsigned char checksum[16];
13897 struct md5_ctx ctx;
13898 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13900 md5_init_ctx (&ctx);
13901 fold_checksum_tree (t, &ctx, ht);
13902 md5_finish_ctx (&ctx, checksum);
13905 for (i = 0; i < 16; i++)
13906 fprintf (stderr, "%d ", checksum[i]);
13908 fprintf (stderr, "\n");
13913 /* Fold a unary tree expression with code CODE of type TYPE with an
13914 operand OP0. LOC is the location of the resulting expression.
13915 Return a folded expression if successful. Otherwise, return a tree
13916 expression with code CODE of type TYPE with an operand OP0. */
13919 fold_build1_stat_loc (location_t loc,
13920 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13923 #ifdef ENABLE_FOLD_CHECKING
13924 unsigned char checksum_before[16], checksum_after[16];
13925 struct md5_ctx ctx;
13928 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13929 md5_init_ctx (&ctx);
13930 fold_checksum_tree (op0, &ctx, ht);
13931 md5_finish_ctx (&ctx, checksum_before);
13935 tem = fold_unary_loc (loc, code, type, op0);
13937 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
13939 #ifdef ENABLE_FOLD_CHECKING
13940 md5_init_ctx (&ctx);
13941 fold_checksum_tree (op0, &ctx, ht);
13942 md5_finish_ctx (&ctx, checksum_after);
13945 if (memcmp (checksum_before, checksum_after, 16))
13946 fold_check_failed (op0, tem);
13951 /* Fold a binary tree expression with code CODE of type TYPE with
13952 operands OP0 and OP1. LOC is the location of the resulting
13953 expression. Return a folded expression if successful. Otherwise,
13954 return a tree expression with code CODE of type TYPE with operands
13958 fold_build2_stat_loc (location_t loc,
13959 enum tree_code code, tree type, tree op0, tree op1
13963 #ifdef ENABLE_FOLD_CHECKING
13964 unsigned char checksum_before_op0[16],
13965 checksum_before_op1[16],
13966 checksum_after_op0[16],
13967 checksum_after_op1[16];
13968 struct md5_ctx ctx;
13971 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13972 md5_init_ctx (&ctx);
13973 fold_checksum_tree (op0, &ctx, ht);
13974 md5_finish_ctx (&ctx, checksum_before_op0);
13977 md5_init_ctx (&ctx);
13978 fold_checksum_tree (op1, &ctx, ht);
13979 md5_finish_ctx (&ctx, checksum_before_op1);
13983 tem = fold_binary_loc (loc, code, type, op0, op1);
13985 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13987 #ifdef ENABLE_FOLD_CHECKING
13988 md5_init_ctx (&ctx);
13989 fold_checksum_tree (op0, &ctx, ht);
13990 md5_finish_ctx (&ctx, checksum_after_op0);
13993 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13994 fold_check_failed (op0, tem);
13996 md5_init_ctx (&ctx);
13997 fold_checksum_tree (op1, &ctx, ht);
13998 md5_finish_ctx (&ctx, checksum_after_op1);
14001 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14002 fold_check_failed (op1, tem);
14007 /* Fold a ternary tree expression with code CODE of type TYPE with
14008 operands OP0, OP1, and OP2. Return a folded expression if
14009 successful. Otherwise, return a tree expression with code CODE of
14010 type TYPE with operands OP0, OP1, and OP2. */
14013 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14014 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14017 #ifdef ENABLE_FOLD_CHECKING
14018 unsigned char checksum_before_op0[16],
14019 checksum_before_op1[16],
14020 checksum_before_op2[16],
14021 checksum_after_op0[16],
14022 checksum_after_op1[16],
14023 checksum_after_op2[16];
14024 struct md5_ctx ctx;
14027 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14028 md5_init_ctx (&ctx);
14029 fold_checksum_tree (op0, &ctx, ht);
14030 md5_finish_ctx (&ctx, checksum_before_op0);
14033 md5_init_ctx (&ctx);
14034 fold_checksum_tree (op1, &ctx, ht);
14035 md5_finish_ctx (&ctx, checksum_before_op1);
14038 md5_init_ctx (&ctx);
14039 fold_checksum_tree (op2, &ctx, ht);
14040 md5_finish_ctx (&ctx, checksum_before_op2);
14044 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14045 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14047 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14049 #ifdef ENABLE_FOLD_CHECKING
14050 md5_init_ctx (&ctx);
14051 fold_checksum_tree (op0, &ctx, ht);
14052 md5_finish_ctx (&ctx, checksum_after_op0);
14055 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14056 fold_check_failed (op0, tem);
14058 md5_init_ctx (&ctx);
14059 fold_checksum_tree (op1, &ctx, ht);
14060 md5_finish_ctx (&ctx, checksum_after_op1);
14063 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14064 fold_check_failed (op1, tem);
14066 md5_init_ctx (&ctx);
14067 fold_checksum_tree (op2, &ctx, ht);
14068 md5_finish_ctx (&ctx, checksum_after_op2);
14071 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14072 fold_check_failed (op2, tem);
14077 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14078 arguments in ARGARRAY, and a null static chain.
14079 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14080 of type TYPE from the given operands as constructed by build_call_array. */
14083 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14084 int nargs, tree *argarray)
14087 #ifdef ENABLE_FOLD_CHECKING
14088 unsigned char checksum_before_fn[16],
14089 checksum_before_arglist[16],
14090 checksum_after_fn[16],
14091 checksum_after_arglist[16];
14092 struct md5_ctx ctx;
14096 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14097 md5_init_ctx (&ctx);
14098 fold_checksum_tree (fn, &ctx, ht);
14099 md5_finish_ctx (&ctx, checksum_before_fn);
14102 md5_init_ctx (&ctx);
14103 for (i = 0; i < nargs; i++)
14104 fold_checksum_tree (argarray[i], &ctx, ht);
14105 md5_finish_ctx (&ctx, checksum_before_arglist);
14109 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14111 #ifdef ENABLE_FOLD_CHECKING
14112 md5_init_ctx (&ctx);
14113 fold_checksum_tree (fn, &ctx, ht);
14114 md5_finish_ctx (&ctx, checksum_after_fn);
14117 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14118 fold_check_failed (fn, tem);
14120 md5_init_ctx (&ctx);
14121 for (i = 0; i < nargs; i++)
14122 fold_checksum_tree (argarray[i], &ctx, ht);
14123 md5_finish_ctx (&ctx, checksum_after_arglist);
14126 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14127 fold_check_failed (NULL_TREE, tem);
14132 /* Perform constant folding and related simplification of initializer
14133 expression EXPR. These behave identically to "fold_buildN" but ignore
14134 potential run-time traps and exceptions that fold must preserve. */
14136 #define START_FOLD_INIT \
14137 int saved_signaling_nans = flag_signaling_nans;\
14138 int saved_trapping_math = flag_trapping_math;\
14139 int saved_rounding_math = flag_rounding_math;\
14140 int saved_trapv = flag_trapv;\
14141 int saved_folding_initializer = folding_initializer;\
14142 flag_signaling_nans = 0;\
14143 flag_trapping_math = 0;\
14144 flag_rounding_math = 0;\
14146 folding_initializer = 1;
14148 #define END_FOLD_INIT \
14149 flag_signaling_nans = saved_signaling_nans;\
14150 flag_trapping_math = saved_trapping_math;\
14151 flag_rounding_math = saved_rounding_math;\
14152 flag_trapv = saved_trapv;\
14153 folding_initializer = saved_folding_initializer;
14156 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14157 tree type, tree op)
14162 result = fold_build1_loc (loc, code, type, op);
14169 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14170 tree type, tree op0, tree op1)
14175 result = fold_build2_loc (loc, code, type, op0, op1);
14182 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14183 tree type, tree op0, tree op1, tree op2)
14188 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14195 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14196 int nargs, tree *argarray)
14201 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14207 #undef START_FOLD_INIT
14208 #undef END_FOLD_INIT
14210 /* Determine if first argument is a multiple of second argument. Return 0 if
14211 it is not, or we cannot easily determined it to be.
14213 An example of the sort of thing we care about (at this point; this routine
14214 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14215 fold cases do now) is discovering that
14217 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14223 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14225 This code also handles discovering that
14227 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14229 is a multiple of 8 so we don't have to worry about dealing with a
14230 possible remainder.
14232 Note that we *look* inside a SAVE_EXPR only to determine how it was
14233 calculated; it is not safe for fold to do much of anything else with the
14234 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14235 at run time. For example, the latter example above *cannot* be implemented
14236 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14237 evaluation time of the original SAVE_EXPR is not necessarily the same at
14238 the time the new expression is evaluated. The only optimization of this
14239 sort that would be valid is changing
14241 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14245 SAVE_EXPR (I) * SAVE_EXPR (J)
14247 (where the same SAVE_EXPR (J) is used in the original and the
14248 transformed version). */
14251 multiple_of_p (tree type, const_tree top, const_tree bottom)
14253 if (operand_equal_p (top, bottom, 0))
14256 if (TREE_CODE (type) != INTEGER_TYPE)
14259 switch (TREE_CODE (top))
14262 /* Bitwise and provides a power of two multiple. If the mask is
14263 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14264 if (!integer_pow2p (bottom))
14269 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14270 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14274 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14275 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14278 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14282 op1 = TREE_OPERAND (top, 1);
14283 /* const_binop may not detect overflow correctly,
14284 so check for it explicitly here. */
14285 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14286 > TREE_INT_CST_LOW (op1)
14287 && TREE_INT_CST_HIGH (op1) == 0
14288 && 0 != (t1 = fold_convert (type,
14289 const_binop (LSHIFT_EXPR,
14292 && !TREE_OVERFLOW (t1))
14293 return multiple_of_p (type, t1, bottom);
14298 /* Can't handle conversions from non-integral or wider integral type. */
14299 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14300 || (TYPE_PRECISION (type)
14301 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14304 /* .. fall through ... */
14307 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14310 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14311 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14314 if (TREE_CODE (bottom) != INTEGER_CST
14315 || integer_zerop (bottom)
14316 || (TYPE_UNSIGNED (type)
14317 && (tree_int_cst_sgn (top) < 0
14318 || tree_int_cst_sgn (bottom) < 0)))
14320 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14328 /* Return true if CODE or TYPE is known to be non-negative. */
14331 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14333 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14334 && truth_value_p (code))
14335 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14336 have a signed:1 type (where the value is -1 and 0). */
14341 /* Return true if (CODE OP0) is known to be non-negative. If the return
14342 value is based on the assumption that signed overflow is undefined,
14343 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14344 *STRICT_OVERFLOW_P. */
14347 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14348 bool *strict_overflow_p)
14350 if (TYPE_UNSIGNED (type))
14356 /* We can't return 1 if flag_wrapv is set because
14357 ABS_EXPR<INT_MIN> = INT_MIN. */
14358 if (!INTEGRAL_TYPE_P (type))
14360 if (TYPE_OVERFLOW_UNDEFINED (type))
14362 *strict_overflow_p = true;
14367 case NON_LVALUE_EXPR:
14369 case FIX_TRUNC_EXPR:
14370 return tree_expr_nonnegative_warnv_p (op0,
14371 strict_overflow_p);
14375 tree inner_type = TREE_TYPE (op0);
14376 tree outer_type = type;
14378 if (TREE_CODE (outer_type) == REAL_TYPE)
14380 if (TREE_CODE (inner_type) == REAL_TYPE)
14381 return tree_expr_nonnegative_warnv_p (op0,
14382 strict_overflow_p);
14383 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14385 if (TYPE_UNSIGNED (inner_type))
14387 return tree_expr_nonnegative_warnv_p (op0,
14388 strict_overflow_p);
14391 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14393 if (TREE_CODE (inner_type) == REAL_TYPE)
14394 return tree_expr_nonnegative_warnv_p (op0,
14395 strict_overflow_p);
14396 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14397 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14398 && TYPE_UNSIGNED (inner_type);
14404 return tree_simple_nonnegative_warnv_p (code, type);
14407 /* We don't know sign of `t', so be conservative and return false. */
14411 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14412 value is based on the assumption that signed overflow is undefined,
14413 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14414 *STRICT_OVERFLOW_P. */
14417 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14418 tree op1, bool *strict_overflow_p)
14420 if (TYPE_UNSIGNED (type))
14425 case POINTER_PLUS_EXPR:
14427 if (FLOAT_TYPE_P (type))
14428 return (tree_expr_nonnegative_warnv_p (op0,
14430 && tree_expr_nonnegative_warnv_p (op1,
14431 strict_overflow_p));
14433 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14434 both unsigned and at least 2 bits shorter than the result. */
14435 if (TREE_CODE (type) == INTEGER_TYPE
14436 && TREE_CODE (op0) == NOP_EXPR
14437 && TREE_CODE (op1) == NOP_EXPR)
14439 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14440 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14441 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14442 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14444 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14445 TYPE_PRECISION (inner2)) + 1;
14446 return prec < TYPE_PRECISION (type);
14452 if (FLOAT_TYPE_P (type))
14454 /* x * x for floating point x is always non-negative. */
14455 if (operand_equal_p (op0, op1, 0))
14457 return (tree_expr_nonnegative_warnv_p (op0,
14459 && tree_expr_nonnegative_warnv_p (op1,
14460 strict_overflow_p));
14463 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14464 both unsigned and their total bits is shorter than the result. */
14465 if (TREE_CODE (type) == INTEGER_TYPE
14466 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14467 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14469 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14470 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14472 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14473 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14476 bool unsigned0 = TYPE_UNSIGNED (inner0);
14477 bool unsigned1 = TYPE_UNSIGNED (inner1);
14479 if (TREE_CODE (op0) == INTEGER_CST)
14480 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14482 if (TREE_CODE (op1) == INTEGER_CST)
14483 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14485 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14486 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14488 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14489 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14490 : TYPE_PRECISION (inner0);
14492 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14493 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14494 : TYPE_PRECISION (inner1);
14496 return precision0 + precision1 < TYPE_PRECISION (type);
14503 return (tree_expr_nonnegative_warnv_p (op0,
14505 || tree_expr_nonnegative_warnv_p (op1,
14506 strict_overflow_p));
14512 case TRUNC_DIV_EXPR:
14513 case CEIL_DIV_EXPR:
14514 case FLOOR_DIV_EXPR:
14515 case ROUND_DIV_EXPR:
14516 return (tree_expr_nonnegative_warnv_p (op0,
14518 && tree_expr_nonnegative_warnv_p (op1,
14519 strict_overflow_p));
14521 case TRUNC_MOD_EXPR:
14522 case CEIL_MOD_EXPR:
14523 case FLOOR_MOD_EXPR:
14524 case ROUND_MOD_EXPR:
14525 return tree_expr_nonnegative_warnv_p (op0,
14526 strict_overflow_p);
14528 return tree_simple_nonnegative_warnv_p (code, type);
14531 /* We don't know sign of `t', so be conservative and return false. */
14535 /* Return true if T is known to be non-negative. If the return
14536 value is based on the assumption that signed overflow is undefined,
14537 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14538 *STRICT_OVERFLOW_P. */
14541 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14543 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14546 switch (TREE_CODE (t))
14549 return tree_int_cst_sgn (t) >= 0;
14552 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14555 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14558 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14560 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14561 strict_overflow_p));
14563 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14566 /* We don't know sign of `t', so be conservative and return false. */
14570 /* Return true if T is known to be non-negative. If the return
14571 value is based on the assumption that signed overflow is undefined,
14572 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14573 *STRICT_OVERFLOW_P. */
14576 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14577 tree arg0, tree arg1, bool *strict_overflow_p)
14579 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14580 switch (DECL_FUNCTION_CODE (fndecl))
14582 CASE_FLT_FN (BUILT_IN_ACOS):
14583 CASE_FLT_FN (BUILT_IN_ACOSH):
14584 CASE_FLT_FN (BUILT_IN_CABS):
14585 CASE_FLT_FN (BUILT_IN_COSH):
14586 CASE_FLT_FN (BUILT_IN_ERFC):
14587 CASE_FLT_FN (BUILT_IN_EXP):
14588 CASE_FLT_FN (BUILT_IN_EXP10):
14589 CASE_FLT_FN (BUILT_IN_EXP2):
14590 CASE_FLT_FN (BUILT_IN_FABS):
14591 CASE_FLT_FN (BUILT_IN_FDIM):
14592 CASE_FLT_FN (BUILT_IN_HYPOT):
14593 CASE_FLT_FN (BUILT_IN_POW10):
14594 CASE_INT_FN (BUILT_IN_FFS):
14595 CASE_INT_FN (BUILT_IN_PARITY):
14596 CASE_INT_FN (BUILT_IN_POPCOUNT):
14597 case BUILT_IN_BSWAP32:
14598 case BUILT_IN_BSWAP64:
14602 CASE_FLT_FN (BUILT_IN_SQRT):
14603 /* sqrt(-0.0) is -0.0. */
14604 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14606 return tree_expr_nonnegative_warnv_p (arg0,
14607 strict_overflow_p);
14609 CASE_FLT_FN (BUILT_IN_ASINH):
14610 CASE_FLT_FN (BUILT_IN_ATAN):
14611 CASE_FLT_FN (BUILT_IN_ATANH):
14612 CASE_FLT_FN (BUILT_IN_CBRT):
14613 CASE_FLT_FN (BUILT_IN_CEIL):
14614 CASE_FLT_FN (BUILT_IN_ERF):
14615 CASE_FLT_FN (BUILT_IN_EXPM1):
14616 CASE_FLT_FN (BUILT_IN_FLOOR):
14617 CASE_FLT_FN (BUILT_IN_FMOD):
14618 CASE_FLT_FN (BUILT_IN_FREXP):
14619 CASE_FLT_FN (BUILT_IN_LCEIL):
14620 CASE_FLT_FN (BUILT_IN_LDEXP):
14621 CASE_FLT_FN (BUILT_IN_LFLOOR):
14622 CASE_FLT_FN (BUILT_IN_LLCEIL):
14623 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14624 CASE_FLT_FN (BUILT_IN_LLRINT):
14625 CASE_FLT_FN (BUILT_IN_LLROUND):
14626 CASE_FLT_FN (BUILT_IN_LRINT):
14627 CASE_FLT_FN (BUILT_IN_LROUND):
14628 CASE_FLT_FN (BUILT_IN_MODF):
14629 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14630 CASE_FLT_FN (BUILT_IN_RINT):
14631 CASE_FLT_FN (BUILT_IN_ROUND):
14632 CASE_FLT_FN (BUILT_IN_SCALB):
14633 CASE_FLT_FN (BUILT_IN_SCALBLN):
14634 CASE_FLT_FN (BUILT_IN_SCALBN):
14635 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14636 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14637 CASE_FLT_FN (BUILT_IN_SINH):
14638 CASE_FLT_FN (BUILT_IN_TANH):
14639 CASE_FLT_FN (BUILT_IN_TRUNC):
14640 /* True if the 1st argument is nonnegative. */
14641 return tree_expr_nonnegative_warnv_p (arg0,
14642 strict_overflow_p);
14644 CASE_FLT_FN (BUILT_IN_FMAX):
14645 /* True if the 1st OR 2nd arguments are nonnegative. */
14646 return (tree_expr_nonnegative_warnv_p (arg0,
14648 || (tree_expr_nonnegative_warnv_p (arg1,
14649 strict_overflow_p)));
14651 CASE_FLT_FN (BUILT_IN_FMIN):
14652 /* True if the 1st AND 2nd arguments are nonnegative. */
14653 return (tree_expr_nonnegative_warnv_p (arg0,
14655 && (tree_expr_nonnegative_warnv_p (arg1,
14656 strict_overflow_p)));
14658 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14659 /* True if the 2nd argument is nonnegative. */
14660 return tree_expr_nonnegative_warnv_p (arg1,
14661 strict_overflow_p);
14663 CASE_FLT_FN (BUILT_IN_POWI):
14664 /* True if the 1st argument is nonnegative or the second
14665 argument is an even integer. */
14666 if (TREE_CODE (arg1) == INTEGER_CST
14667 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14669 return tree_expr_nonnegative_warnv_p (arg0,
14670 strict_overflow_p);
14672 CASE_FLT_FN (BUILT_IN_POW):
14673 /* True if the 1st argument is nonnegative or the second
14674 argument is an even integer valued real. */
14675 if (TREE_CODE (arg1) == REAL_CST)
14680 c = TREE_REAL_CST (arg1);
14681 n = real_to_integer (&c);
14684 REAL_VALUE_TYPE cint;
14685 real_from_integer (&cint, VOIDmode, n,
14686 n < 0 ? -1 : 0, 0);
14687 if (real_identical (&c, &cint))
14691 return tree_expr_nonnegative_warnv_p (arg0,
14692 strict_overflow_p);
14697 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14701 /* Return true if T is known to be non-negative. If the return
14702 value is based on the assumption that signed overflow is undefined,
14703 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14704 *STRICT_OVERFLOW_P. */
14707 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14709 enum tree_code code = TREE_CODE (t);
14710 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14717 tree temp = TARGET_EXPR_SLOT (t);
14718 t = TARGET_EXPR_INITIAL (t);
14720 /* If the initializer is non-void, then it's a normal expression
14721 that will be assigned to the slot. */
14722 if (!VOID_TYPE_P (t))
14723 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14725 /* Otherwise, the initializer sets the slot in some way. One common
14726 way is an assignment statement at the end of the initializer. */
14729 if (TREE_CODE (t) == BIND_EXPR)
14730 t = expr_last (BIND_EXPR_BODY (t));
14731 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14732 || TREE_CODE (t) == TRY_CATCH_EXPR)
14733 t = expr_last (TREE_OPERAND (t, 0));
14734 else if (TREE_CODE (t) == STATEMENT_LIST)
14739 if (TREE_CODE (t) == MODIFY_EXPR
14740 && TREE_OPERAND (t, 0) == temp)
14741 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14742 strict_overflow_p);
14749 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14750 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14752 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14753 get_callee_fndecl (t),
14756 strict_overflow_p);
14758 case COMPOUND_EXPR:
14760 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14761 strict_overflow_p);
14763 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14764 strict_overflow_p);
14766 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14767 strict_overflow_p);
14770 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14774 /* We don't know sign of `t', so be conservative and return false. */
14778 /* Return true if T is known to be non-negative. If the return
14779 value is based on the assumption that signed overflow is undefined,
14780 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14781 *STRICT_OVERFLOW_P. */
14784 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14786 enum tree_code code;
14787 if (t == error_mark_node)
14790 code = TREE_CODE (t);
14791 switch (TREE_CODE_CLASS (code))
14794 case tcc_comparison:
14795 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14797 TREE_OPERAND (t, 0),
14798 TREE_OPERAND (t, 1),
14799 strict_overflow_p);
14802 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14804 TREE_OPERAND (t, 0),
14805 strict_overflow_p);
14808 case tcc_declaration:
14809 case tcc_reference:
14810 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14818 case TRUTH_AND_EXPR:
14819 case TRUTH_OR_EXPR:
14820 case TRUTH_XOR_EXPR:
14821 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14823 TREE_OPERAND (t, 0),
14824 TREE_OPERAND (t, 1),
14825 strict_overflow_p);
14826 case TRUTH_NOT_EXPR:
14827 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14829 TREE_OPERAND (t, 0),
14830 strict_overflow_p);
14837 case WITH_SIZE_EXPR:
14839 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14842 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14846 /* Return true if `t' is known to be non-negative. Handle warnings
14847 about undefined signed overflow. */
14850 tree_expr_nonnegative_p (tree t)
14852 bool ret, strict_overflow_p;
14854 strict_overflow_p = false;
14855 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14856 if (strict_overflow_p)
14857 fold_overflow_warning (("assuming signed overflow does not occur when "
14858 "determining that expression is always "
14860 WARN_STRICT_OVERFLOW_MISC);
14865 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14866 For floating point we further ensure that T is not denormal.
14867 Similar logic is present in nonzero_address in rtlanal.h.
14869 If the return value is based on the assumption that signed overflow
14870 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14871 change *STRICT_OVERFLOW_P. */
14874 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14875 bool *strict_overflow_p)
14880 return tree_expr_nonzero_warnv_p (op0,
14881 strict_overflow_p);
14885 tree inner_type = TREE_TYPE (op0);
14886 tree outer_type = type;
14888 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14889 && tree_expr_nonzero_warnv_p (op0,
14890 strict_overflow_p));
14894 case NON_LVALUE_EXPR:
14895 return tree_expr_nonzero_warnv_p (op0,
14896 strict_overflow_p);
14905 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14906 For floating point we further ensure that T is not denormal.
14907 Similar logic is present in nonzero_address in rtlanal.h.
14909 If the return value is based on the assumption that signed overflow
14910 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14911 change *STRICT_OVERFLOW_P. */
14914 tree_binary_nonzero_warnv_p (enum tree_code code,
14917 tree op1, bool *strict_overflow_p)
14919 bool sub_strict_overflow_p;
14922 case POINTER_PLUS_EXPR:
14924 if (TYPE_OVERFLOW_UNDEFINED (type))
14926 /* With the presence of negative values it is hard
14927 to say something. */
14928 sub_strict_overflow_p = false;
14929 if (!tree_expr_nonnegative_warnv_p (op0,
14930 &sub_strict_overflow_p)
14931 || !tree_expr_nonnegative_warnv_p (op1,
14932 &sub_strict_overflow_p))
14934 /* One of operands must be positive and the other non-negative. */
14935 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14936 overflows, on a twos-complement machine the sum of two
14937 nonnegative numbers can never be zero. */
14938 return (tree_expr_nonzero_warnv_p (op0,
14940 || tree_expr_nonzero_warnv_p (op1,
14941 strict_overflow_p));
14946 if (TYPE_OVERFLOW_UNDEFINED (type))
14948 if (tree_expr_nonzero_warnv_p (op0,
14950 && tree_expr_nonzero_warnv_p (op1,
14951 strict_overflow_p))
14953 *strict_overflow_p = true;
14960 sub_strict_overflow_p = false;
14961 if (tree_expr_nonzero_warnv_p (op0,
14962 &sub_strict_overflow_p)
14963 && tree_expr_nonzero_warnv_p (op1,
14964 &sub_strict_overflow_p))
14966 if (sub_strict_overflow_p)
14967 *strict_overflow_p = true;
14972 sub_strict_overflow_p = false;
14973 if (tree_expr_nonzero_warnv_p (op0,
14974 &sub_strict_overflow_p))
14976 if (sub_strict_overflow_p)
14977 *strict_overflow_p = true;
14979 /* When both operands are nonzero, then MAX must be too. */
14980 if (tree_expr_nonzero_warnv_p (op1,
14981 strict_overflow_p))
14984 /* MAX where operand 0 is positive is positive. */
14985 return tree_expr_nonnegative_warnv_p (op0,
14986 strict_overflow_p);
14988 /* MAX where operand 1 is positive is positive. */
14989 else if (tree_expr_nonzero_warnv_p (op1,
14990 &sub_strict_overflow_p)
14991 && tree_expr_nonnegative_warnv_p (op1,
14992 &sub_strict_overflow_p))
14994 if (sub_strict_overflow_p)
14995 *strict_overflow_p = true;
15001 return (tree_expr_nonzero_warnv_p (op1,
15003 || tree_expr_nonzero_warnv_p (op0,
15004 strict_overflow_p));
15013 /* Return true when T is an address and is known to be nonzero.
15014 For floating point we further ensure that T is not denormal.
15015 Similar logic is present in nonzero_address in rtlanal.h.
15017 If the return value is based on the assumption that signed overflow
15018 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15019 change *STRICT_OVERFLOW_P. */
15022 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15024 bool sub_strict_overflow_p;
15025 switch (TREE_CODE (t))
15028 return !integer_zerop (t);
15032 tree base = TREE_OPERAND (t, 0);
15033 if (!DECL_P (base))
15034 base = get_base_address (base);
15039 /* Weak declarations may link to NULL. Other things may also be NULL
15040 so protect with -fdelete-null-pointer-checks; but not variables
15041 allocated on the stack. */
15043 && (flag_delete_null_pointer_checks
15044 || (DECL_CONTEXT (base)
15045 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15046 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15047 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15049 /* Constants are never weak. */
15050 if (CONSTANT_CLASS_P (base))
15057 sub_strict_overflow_p = false;
15058 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15059 &sub_strict_overflow_p)
15060 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15061 &sub_strict_overflow_p))
15063 if (sub_strict_overflow_p)
15064 *strict_overflow_p = true;
15075 /* Return true when T is an address and is known to be nonzero.
15076 For floating point we further ensure that T is not denormal.
15077 Similar logic is present in nonzero_address in rtlanal.h.
15079 If the return value is based on the assumption that signed overflow
15080 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15081 change *STRICT_OVERFLOW_P. */
15084 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15086 tree type = TREE_TYPE (t);
15087 enum tree_code code;
15089 /* Doing something useful for floating point would need more work. */
15090 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15093 code = TREE_CODE (t);
15094 switch (TREE_CODE_CLASS (code))
15097 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15098 strict_overflow_p);
15100 case tcc_comparison:
15101 return tree_binary_nonzero_warnv_p (code, type,
15102 TREE_OPERAND (t, 0),
15103 TREE_OPERAND (t, 1),
15104 strict_overflow_p);
15106 case tcc_declaration:
15107 case tcc_reference:
15108 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15116 case TRUTH_NOT_EXPR:
15117 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15118 strict_overflow_p);
15120 case TRUTH_AND_EXPR:
15121 case TRUTH_OR_EXPR:
15122 case TRUTH_XOR_EXPR:
15123 return tree_binary_nonzero_warnv_p (code, type,
15124 TREE_OPERAND (t, 0),
15125 TREE_OPERAND (t, 1),
15126 strict_overflow_p);
15133 case WITH_SIZE_EXPR:
15135 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15137 case COMPOUND_EXPR:
15140 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15141 strict_overflow_p);
15144 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15145 strict_overflow_p);
15148 return alloca_call_p (t);
15156 /* Return true when T is an address and is known to be nonzero.
15157 Handle warnings about undefined signed overflow. */
15160 tree_expr_nonzero_p (tree t)
15162 bool ret, strict_overflow_p;
15164 strict_overflow_p = false;
15165 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15166 if (strict_overflow_p)
15167 fold_overflow_warning (("assuming signed overflow does not occur when "
15168 "determining that expression is always "
15170 WARN_STRICT_OVERFLOW_MISC);
15174 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15175 attempt to fold the expression to a constant without modifying TYPE,
15178 If the expression could be simplified to a constant, then return
15179 the constant. If the expression would not be simplified to a
15180 constant, then return NULL_TREE. */
15183 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15185 tree tem = fold_binary (code, type, op0, op1);
15186 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15189 /* Given the components of a unary expression CODE, TYPE and OP0,
15190 attempt to fold the expression to a constant without modifying
15193 If the expression could be simplified to a constant, then return
15194 the constant. If the expression would not be simplified to a
15195 constant, then return NULL_TREE. */
15198 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15200 tree tem = fold_unary (code, type, op0);
15201 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15204 /* If EXP represents referencing an element in a constant string
15205 (either via pointer arithmetic or array indexing), return the
15206 tree representing the value accessed, otherwise return NULL. */
15209 fold_read_from_constant_string (tree exp)
15211 if ((TREE_CODE (exp) == INDIRECT_REF
15212 || TREE_CODE (exp) == ARRAY_REF)
15213 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15215 tree exp1 = TREE_OPERAND (exp, 0);
15218 location_t loc = EXPR_LOCATION (exp);
15220 if (TREE_CODE (exp) == INDIRECT_REF)
15221 string = string_constant (exp1, &index);
15224 tree low_bound = array_ref_low_bound (exp);
15225 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15227 /* Optimize the special-case of a zero lower bound.
15229 We convert the low_bound to sizetype to avoid some problems
15230 with constant folding. (E.g. suppose the lower bound is 1,
15231 and its mode is QI. Without the conversion,l (ARRAY
15232 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15233 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15234 if (! integer_zerop (low_bound))
15235 index = size_diffop_loc (loc, index,
15236 fold_convert_loc (loc, sizetype, low_bound));
15242 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15243 && TREE_CODE (string) == STRING_CST
15244 && TREE_CODE (index) == INTEGER_CST
15245 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15246 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15248 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15249 return build_int_cst_type (TREE_TYPE (exp),
15250 (TREE_STRING_POINTER (string)
15251 [TREE_INT_CST_LOW (index)]));
15256 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15257 an integer constant, real, or fixed-point constant.
15259 TYPE is the type of the result. */
15262 fold_negate_const (tree arg0, tree type)
15264 tree t = NULL_TREE;
15266 switch (TREE_CODE (arg0))
15270 double_int val = tree_to_double_int (arg0);
15271 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15273 t = force_fit_type_double (type, val, 1,
15274 (overflow | TREE_OVERFLOW (arg0))
15275 && !TYPE_UNSIGNED (type));
15280 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15285 FIXED_VALUE_TYPE f;
15286 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15287 &(TREE_FIXED_CST (arg0)), NULL,
15288 TYPE_SATURATING (type));
15289 t = build_fixed (type, f);
15290 /* Propagate overflow flags. */
15291 if (overflow_p | TREE_OVERFLOW (arg0))
15292 TREE_OVERFLOW (t) = 1;
15297 gcc_unreachable ();
15303 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15304 an integer constant or real constant.
15306 TYPE is the type of the result. */
15309 fold_abs_const (tree arg0, tree type)
15311 tree t = NULL_TREE;
15313 switch (TREE_CODE (arg0))
15317 double_int val = tree_to_double_int (arg0);
15319 /* If the value is unsigned or non-negative, then the absolute value
15320 is the same as the ordinary value. */
15321 if (TYPE_UNSIGNED (type)
15322 || !double_int_negative_p (val))
15325 /* If the value is negative, then the absolute value is
15331 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15332 t = force_fit_type_double (type, val, -1,
15333 overflow | TREE_OVERFLOW (arg0));
15339 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15340 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15346 gcc_unreachable ();
15352 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15353 constant. TYPE is the type of the result. */
15356 fold_not_const (const_tree arg0, tree type)
15360 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15362 val = double_int_not (tree_to_double_int (arg0));
15363 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15366 /* Given CODE, a relational operator, the target type, TYPE and two
15367 constant operands OP0 and OP1, return the result of the
15368 relational operation. If the result is not a compile time
15369 constant, then return NULL_TREE. */
15372 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15374 int result, invert;
15376 /* From here on, the only cases we handle are when the result is
15377 known to be a constant. */
15379 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15381 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15382 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15384 /* Handle the cases where either operand is a NaN. */
15385 if (real_isnan (c0) || real_isnan (c1))
15395 case UNORDERED_EXPR:
15409 if (flag_trapping_math)
15415 gcc_unreachable ();
15418 return constant_boolean_node (result, type);
15421 return constant_boolean_node (real_compare (code, c0, c1), type);
15424 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15426 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15427 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15428 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15431 /* Handle equality/inequality of complex constants. */
15432 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15434 tree rcond = fold_relational_const (code, type,
15435 TREE_REALPART (op0),
15436 TREE_REALPART (op1));
15437 tree icond = fold_relational_const (code, type,
15438 TREE_IMAGPART (op0),
15439 TREE_IMAGPART (op1));
15440 if (code == EQ_EXPR)
15441 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15442 else if (code == NE_EXPR)
15443 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15448 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15450 To compute GT, swap the arguments and do LT.
15451 To compute GE, do LT and invert the result.
15452 To compute LE, swap the arguments, do LT and invert the result.
15453 To compute NE, do EQ and invert the result.
15455 Therefore, the code below must handle only EQ and LT. */
15457 if (code == LE_EXPR || code == GT_EXPR)
15462 code = swap_tree_comparison (code);
15465 /* Note that it is safe to invert for real values here because we
15466 have already handled the one case that it matters. */
15469 if (code == NE_EXPR || code == GE_EXPR)
15472 code = invert_tree_comparison (code, false);
15475 /* Compute a result for LT or EQ if args permit;
15476 Otherwise return T. */
15477 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15479 if (code == EQ_EXPR)
15480 result = tree_int_cst_equal (op0, op1);
15481 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15482 result = INT_CST_LT_UNSIGNED (op0, op1);
15484 result = INT_CST_LT (op0, op1);
15491 return constant_boolean_node (result, type);
15494 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15495 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15499 fold_build_cleanup_point_expr (tree type, tree expr)
15501 /* If the expression does not have side effects then we don't have to wrap
15502 it with a cleanup point expression. */
15503 if (!TREE_SIDE_EFFECTS (expr))
15506 /* If the expression is a return, check to see if the expression inside the
15507 return has no side effects or the right hand side of the modify expression
15508 inside the return. If either don't have side effects set we don't need to
15509 wrap the expression in a cleanup point expression. Note we don't check the
15510 left hand side of the modify because it should always be a return decl. */
15511 if (TREE_CODE (expr) == RETURN_EXPR)
15513 tree op = TREE_OPERAND (expr, 0);
15514 if (!op || !TREE_SIDE_EFFECTS (op))
15516 op = TREE_OPERAND (op, 1);
15517 if (!TREE_SIDE_EFFECTS (op))
15521 return build1 (CLEANUP_POINT_EXPR, type, expr);
15524 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15525 of an indirection through OP0, or NULL_TREE if no simplification is
15529 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15535 subtype = TREE_TYPE (sub);
15536 if (!POINTER_TYPE_P (subtype))
15539 if (TREE_CODE (sub) == ADDR_EXPR)
15541 tree op = TREE_OPERAND (sub, 0);
15542 tree optype = TREE_TYPE (op);
15543 /* *&CONST_DECL -> to the value of the const decl. */
15544 if (TREE_CODE (op) == CONST_DECL)
15545 return DECL_INITIAL (op);
15546 /* *&p => p; make sure to handle *&"str"[cst] here. */
15547 if (type == optype)
15549 tree fop = fold_read_from_constant_string (op);
15555 /* *(foo *)&fooarray => fooarray[0] */
15556 else if (TREE_CODE (optype) == ARRAY_TYPE
15557 && type == TREE_TYPE (optype)
15558 && (!in_gimple_form
15559 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15561 tree type_domain = TYPE_DOMAIN (optype);
15562 tree min_val = size_zero_node;
15563 if (type_domain && TYPE_MIN_VALUE (type_domain))
15564 min_val = TYPE_MIN_VALUE (type_domain);
15566 && TREE_CODE (min_val) != INTEGER_CST)
15568 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15569 NULL_TREE, NULL_TREE);
15571 /* *(foo *)&complexfoo => __real__ complexfoo */
15572 else if (TREE_CODE (optype) == COMPLEX_TYPE
15573 && type == TREE_TYPE (optype))
15574 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15575 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15576 else if (TREE_CODE (optype) == VECTOR_TYPE
15577 && type == TREE_TYPE (optype))
15579 tree part_width = TYPE_SIZE (type);
15580 tree index = bitsize_int (0);
15581 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15585 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15586 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15588 tree op00 = TREE_OPERAND (sub, 0);
15589 tree op01 = TREE_OPERAND (sub, 1);
15592 if (TREE_CODE (op00) == ADDR_EXPR)
15595 op00 = TREE_OPERAND (op00, 0);
15596 op00type = TREE_TYPE (op00);
15598 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15599 if (TREE_CODE (op00type) == VECTOR_TYPE
15600 && type == TREE_TYPE (op00type))
15602 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15603 tree part_width = TYPE_SIZE (type);
15604 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15605 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15606 tree index = bitsize_int (indexi);
15608 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
15609 return fold_build3_loc (loc,
15610 BIT_FIELD_REF, type, op00,
15611 part_width, index);
15614 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15615 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15616 && type == TREE_TYPE (op00type))
15618 tree size = TYPE_SIZE_UNIT (type);
15619 if (tree_int_cst_equal (size, op01))
15620 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15622 /* ((foo *)&fooarray)[1] => fooarray[1] */
15623 else if (TREE_CODE (op00type) == ARRAY_TYPE
15624 && type == TREE_TYPE (op00type))
15626 tree type_domain = TYPE_DOMAIN (op00type);
15627 tree min_val = size_zero_node;
15628 if (type_domain && TYPE_MIN_VALUE (type_domain))
15629 min_val = TYPE_MIN_VALUE (type_domain);
15630 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15631 TYPE_SIZE_UNIT (type));
15632 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15633 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15634 NULL_TREE, NULL_TREE);
15639 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15640 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15641 && type == TREE_TYPE (TREE_TYPE (subtype))
15642 && (!in_gimple_form
15643 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15646 tree min_val = size_zero_node;
15647 sub = build_fold_indirect_ref_loc (loc, sub);
15648 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15649 if (type_domain && TYPE_MIN_VALUE (type_domain))
15650 min_val = TYPE_MIN_VALUE (type_domain);
15652 && TREE_CODE (min_val) != INTEGER_CST)
15654 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15661 /* Builds an expression for an indirection through T, simplifying some
15665 build_fold_indirect_ref_loc (location_t loc, tree t)
15667 tree type = TREE_TYPE (TREE_TYPE (t));
15668 tree sub = fold_indirect_ref_1 (loc, type, t);
15673 return build1_loc (loc, INDIRECT_REF, type, t);
15676 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15679 fold_indirect_ref_loc (location_t loc, tree t)
15681 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15689 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15690 whose result is ignored. The type of the returned tree need not be
15691 the same as the original expression. */
15694 fold_ignored_result (tree t)
15696 if (!TREE_SIDE_EFFECTS (t))
15697 return integer_zero_node;
15700 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15703 t = TREE_OPERAND (t, 0);
15707 case tcc_comparison:
15708 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15709 t = TREE_OPERAND (t, 0);
15710 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15711 t = TREE_OPERAND (t, 1);
15716 case tcc_expression:
15717 switch (TREE_CODE (t))
15719 case COMPOUND_EXPR:
15720 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15722 t = TREE_OPERAND (t, 0);
15726 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15727 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15729 t = TREE_OPERAND (t, 0);
15742 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15743 This can only be applied to objects of a sizetype. */
15746 round_up_loc (location_t loc, tree value, int divisor)
15748 tree div = NULL_TREE;
15750 gcc_assert (divisor > 0);
15754 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15755 have to do anything. Only do this when we are not given a const,
15756 because in that case, this check is more expensive than just
15758 if (TREE_CODE (value) != INTEGER_CST)
15760 div = build_int_cst (TREE_TYPE (value), divisor);
15762 if (multiple_of_p (TREE_TYPE (value), value, div))
15766 /* If divisor is a power of two, simplify this to bit manipulation. */
15767 if (divisor == (divisor & -divisor))
15769 if (TREE_CODE (value) == INTEGER_CST)
15771 double_int val = tree_to_double_int (value);
15774 if ((val.low & (divisor - 1)) == 0)
15777 overflow_p = TREE_OVERFLOW (value);
15778 val.low &= ~(divisor - 1);
15779 val.low += divisor;
15787 return force_fit_type_double (TREE_TYPE (value), val,
15794 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15795 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15796 t = build_int_cst (TREE_TYPE (value), -divisor);
15797 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15803 div = build_int_cst (TREE_TYPE (value), divisor);
15804 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15805 value = size_binop_loc (loc, MULT_EXPR, value, div);
15811 /* Likewise, but round down. */
15814 round_down_loc (location_t loc, tree value, int divisor)
15816 tree div = NULL_TREE;
15818 gcc_assert (divisor > 0);
15822 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15823 have to do anything. Only do this when we are not given a const,
15824 because in that case, this check is more expensive than just
15826 if (TREE_CODE (value) != INTEGER_CST)
15828 div = build_int_cst (TREE_TYPE (value), divisor);
15830 if (multiple_of_p (TREE_TYPE (value), value, div))
15834 /* If divisor is a power of two, simplify this to bit manipulation. */
15835 if (divisor == (divisor & -divisor))
15839 t = build_int_cst (TREE_TYPE (value), -divisor);
15840 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15845 div = build_int_cst (TREE_TYPE (value), divisor);
15846 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15847 value = size_binop_loc (loc, MULT_EXPR, value, div);
15853 /* Returns the pointer to the base of the object addressed by EXP and
15854 extracts the information about the offset of the access, storing it
15855 to PBITPOS and POFFSET. */
15858 split_address_to_core_and_offset (tree exp,
15859 HOST_WIDE_INT *pbitpos, tree *poffset)
15862 enum machine_mode mode;
15863 int unsignedp, volatilep;
15864 HOST_WIDE_INT bitsize;
15865 location_t loc = EXPR_LOCATION (exp);
15867 if (TREE_CODE (exp) == ADDR_EXPR)
15869 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15870 poffset, &mode, &unsignedp, &volatilep,
15872 core = build_fold_addr_expr_loc (loc, core);
15878 *poffset = NULL_TREE;
15884 /* Returns true if addresses of E1 and E2 differ by a constant, false
15885 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15888 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15891 HOST_WIDE_INT bitpos1, bitpos2;
15892 tree toffset1, toffset2, tdiff, type;
15894 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15895 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15897 if (bitpos1 % BITS_PER_UNIT != 0
15898 || bitpos2 % BITS_PER_UNIT != 0
15899 || !operand_equal_p (core1, core2, 0))
15902 if (toffset1 && toffset2)
15904 type = TREE_TYPE (toffset1);
15905 if (type != TREE_TYPE (toffset2))
15906 toffset2 = fold_convert (type, toffset2);
15908 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15909 if (!cst_and_fits_in_hwi (tdiff))
15912 *diff = int_cst_value (tdiff);
15914 else if (toffset1 || toffset2)
15916 /* If only one of the offsets is non-constant, the difference cannot
15923 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15927 /* Simplify the floating point expression EXP when the sign of the
15928 result is not significant. Return NULL_TREE if no simplification
15932 fold_strip_sign_ops (tree exp)
15935 location_t loc = EXPR_LOCATION (exp);
15937 switch (TREE_CODE (exp))
15941 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15942 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15946 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15948 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15949 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15950 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15951 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
15952 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15953 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15956 case COMPOUND_EXPR:
15957 arg0 = TREE_OPERAND (exp, 0);
15958 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15960 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15964 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15965 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15967 return fold_build3_loc (loc,
15968 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15969 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15970 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15975 const enum built_in_function fcode = builtin_mathfn_code (exp);
15978 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15979 /* Strip copysign function call, return the 1st argument. */
15980 arg0 = CALL_EXPR_ARG (exp, 0);
15981 arg1 = CALL_EXPR_ARG (exp, 1);
15982 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
15985 /* Strip sign ops from the argument of "odd" math functions. */
15986 if (negate_mathfn_p (fcode))
15988 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15990 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);