1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide and size_binop.
32 fold takes a tree as argument and returns a simplified tree.
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'.
41 Note: Since the folders get called on non-gimple code as well as
42 gimple code, we need to handle GIMPLE tuples as well as their
43 corresponding tree equivalents. */
47 #include "coretypes.h"
56 #include "diagnostic-core.h"
60 #include "langhooks.h"
63 #include "tree-flow.h"
65 /* Nonzero if we are folding constants inside an initializer; zero
67 int folding_initializer = 0;
69 /* The following constants represent a bit based encoding of GCC's
70 comparison operators. This encoding simplifies transformations
71 on relational comparison operators, such as AND and OR. */
72 enum comparison_code {
91 static bool negate_mathfn_p (enum built_in_function);
92 static bool negate_expr_p (tree);
93 static tree negate_expr (tree);
94 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
95 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
96 static tree const_binop (enum tree_code, tree, tree);
97 static enum comparison_code comparison_to_compcode (enum tree_code);
98 static enum tree_code compcode_to_comparison (enum comparison_code);
99 static int operand_equal_for_comparison_p (tree, tree, tree);
100 static int twoval_comparison_p (tree, tree *, tree *, int *);
101 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
102 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
103 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
104 static tree make_bit_field_ref (location_t, tree, tree,
105 HOST_WIDE_INT, HOST_WIDE_INT, int);
106 static tree optimize_bit_field_compare (location_t, enum tree_code,
108 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
110 enum machine_mode *, int *, int *,
112 static int all_ones_mask_p (const_tree, int);
113 static tree sign_bit_p (tree, const_tree);
114 static int simple_operand_p (const_tree);
115 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
116 static tree range_predecessor (tree);
117 static tree range_successor (tree);
118 extern tree make_range (tree, int *, tree *, tree *, bool *);
119 extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
121 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
122 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
123 static tree unextend (tree, int, int, tree);
124 static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
125 static tree optimize_minmax_comparison (location_t, enum tree_code,
127 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
128 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
129 static tree fold_binary_op_with_conditional_arg (location_t,
130 enum tree_code, tree,
133 static tree fold_mathfn_compare (location_t,
134 enum built_in_function, enum tree_code,
136 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
137 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
138 static bool reorder_operands_p (const_tree, const_tree);
139 static tree fold_negate_const (tree, tree);
140 static tree fold_not_const (const_tree, tree);
141 static tree fold_relational_const (enum tree_code, tree, tree, tree);
142 static tree fold_convert_const (enum tree_code, tree, tree);
145 /* Similar to protected_set_expr_location, but never modify x in place,
146 if location can and needs to be set, unshare it. */
149 protected_set_expr_location_unshare (tree x, location_t loc)
151 if (CAN_HAVE_LOCATION_P (x)
152 && EXPR_LOCATION (x) != loc
153 && !(TREE_CODE (x) == SAVE_EXPR
154 || TREE_CODE (x) == TARGET_EXPR
155 || TREE_CODE (x) == BIND_EXPR))
158 SET_EXPR_LOCATION (x, loc);
164 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
165 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
166 and SUM1. Then this yields nonzero if overflow occurred during the
169 Overflow occurs if A and B have the same sign, but A and SUM differ in
170 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
172 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
174 /* If ARG2 divides ARG1 with zero remainder, carries out the division
175 of type CODE and returns the quotient.
176 Otherwise returns NULL_TREE. */
179 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
184 /* The sign of the division is according to operand two, that
185 does the correct thing for POINTER_PLUS_EXPR where we want
186 a signed division. */
187 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
188 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
189 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
192 quo = double_int_divmod (tree_to_double_int (arg1),
193 tree_to_double_int (arg2),
196 if (double_int_zero_p (rem))
197 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
202 /* This is nonzero if we should defer warnings about undefined
203 overflow. This facility exists because these warnings are a
204 special case. The code to estimate loop iterations does not want
205 to issue any warnings, since it works with expressions which do not
206 occur in user code. Various bits of cleanup code call fold(), but
207 only use the result if it has certain characteristics (e.g., is a
208 constant); that code only wants to issue a warning if the result is
211 static int fold_deferring_overflow_warnings;
213 /* If a warning about undefined overflow is deferred, this is the
214 warning. Note that this may cause us to turn two warnings into
215 one, but that is fine since it is sufficient to only give one
216 warning per expression. */
218 static const char* fold_deferred_overflow_warning;
220 /* If a warning about undefined overflow is deferred, this is the
221 level at which the warning should be emitted. */
223 static enum warn_strict_overflow_code fold_deferred_overflow_code;
225 /* Start deferring overflow warnings. We could use a stack here to
226 permit nested calls, but at present it is not necessary. */
229 fold_defer_overflow_warnings (void)
231 ++fold_deferring_overflow_warnings;
234 /* Stop deferring overflow warnings. If there is a pending warning,
235 and ISSUE is true, then issue the warning if appropriate. STMT is
236 the statement with which the warning should be associated (used for
237 location information); STMT may be NULL. CODE is the level of the
238 warning--a warn_strict_overflow_code value. This function will use
239 the smaller of CODE and the deferred code when deciding whether to
240 issue the warning. CODE may be zero to mean to always use the
244 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
249 gcc_assert (fold_deferring_overflow_warnings > 0);
250 --fold_deferring_overflow_warnings;
251 if (fold_deferring_overflow_warnings > 0)
253 if (fold_deferred_overflow_warning != NULL
255 && code < (int) fold_deferred_overflow_code)
256 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
260 warnmsg = fold_deferred_overflow_warning;
261 fold_deferred_overflow_warning = NULL;
263 if (!issue || warnmsg == NULL)
266 if (gimple_no_warning_p (stmt))
269 /* Use the smallest code level when deciding to issue the
271 if (code == 0 || code > (int) fold_deferred_overflow_code)
272 code = fold_deferred_overflow_code;
274 if (!issue_strict_overflow_warning (code))
278 locus = input_location;
280 locus = gimple_location (stmt);
281 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
284 /* Stop deferring overflow warnings, ignoring any deferred
288 fold_undefer_and_ignore_overflow_warnings (void)
290 fold_undefer_overflow_warnings (false, NULL, 0);
293 /* Whether we are deferring overflow warnings. */
296 fold_deferring_overflow_warnings_p (void)
298 return fold_deferring_overflow_warnings > 0;
301 /* This is called when we fold something based on the fact that signed
302 overflow is undefined. */
305 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
307 if (fold_deferring_overflow_warnings > 0)
309 if (fold_deferred_overflow_warning == NULL
310 || wc < fold_deferred_overflow_code)
312 fold_deferred_overflow_warning = gmsgid;
313 fold_deferred_overflow_code = wc;
316 else if (issue_strict_overflow_warning (wc))
317 warning (OPT_Wstrict_overflow, gmsgid);
320 /* Return true if the built-in mathematical function specified by CODE
321 is odd, i.e. -f(x) == f(-x). */
324 negate_mathfn_p (enum built_in_function code)
328 CASE_FLT_FN (BUILT_IN_ASIN):
329 CASE_FLT_FN (BUILT_IN_ASINH):
330 CASE_FLT_FN (BUILT_IN_ATAN):
331 CASE_FLT_FN (BUILT_IN_ATANH):
332 CASE_FLT_FN (BUILT_IN_CASIN):
333 CASE_FLT_FN (BUILT_IN_CASINH):
334 CASE_FLT_FN (BUILT_IN_CATAN):
335 CASE_FLT_FN (BUILT_IN_CATANH):
336 CASE_FLT_FN (BUILT_IN_CBRT):
337 CASE_FLT_FN (BUILT_IN_CPROJ):
338 CASE_FLT_FN (BUILT_IN_CSIN):
339 CASE_FLT_FN (BUILT_IN_CSINH):
340 CASE_FLT_FN (BUILT_IN_CTAN):
341 CASE_FLT_FN (BUILT_IN_CTANH):
342 CASE_FLT_FN (BUILT_IN_ERF):
343 CASE_FLT_FN (BUILT_IN_LLROUND):
344 CASE_FLT_FN (BUILT_IN_LROUND):
345 CASE_FLT_FN (BUILT_IN_ROUND):
346 CASE_FLT_FN (BUILT_IN_SIN):
347 CASE_FLT_FN (BUILT_IN_SINH):
348 CASE_FLT_FN (BUILT_IN_TAN):
349 CASE_FLT_FN (BUILT_IN_TANH):
350 CASE_FLT_FN (BUILT_IN_TRUNC):
353 CASE_FLT_FN (BUILT_IN_LLRINT):
354 CASE_FLT_FN (BUILT_IN_LRINT):
355 CASE_FLT_FN (BUILT_IN_NEARBYINT):
356 CASE_FLT_FN (BUILT_IN_RINT):
357 return !flag_rounding_math;
365 /* Check whether we may negate an integer constant T without causing
369 may_negate_without_overflow_p (const_tree t)
371 unsigned HOST_WIDE_INT val;
375 gcc_assert (TREE_CODE (t) == INTEGER_CST);
377 type = TREE_TYPE (t);
378 if (TYPE_UNSIGNED (type))
381 prec = TYPE_PRECISION (type);
382 if (prec > HOST_BITS_PER_WIDE_INT)
384 if (TREE_INT_CST_LOW (t) != 0)
386 prec -= HOST_BITS_PER_WIDE_INT;
387 val = TREE_INT_CST_HIGH (t);
390 val = TREE_INT_CST_LOW (t);
391 if (prec < HOST_BITS_PER_WIDE_INT)
392 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
393 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
396 /* Determine whether an expression T can be cheaply negated using
397 the function negate_expr without introducing undefined overflow. */
400 negate_expr_p (tree t)
407 type = TREE_TYPE (t);
410 switch (TREE_CODE (t))
413 if (TYPE_OVERFLOW_WRAPS (type))
416 /* Check that -CST will not overflow type. */
417 return may_negate_without_overflow_p (t);
419 return (INTEGRAL_TYPE_P (type)
420 && TYPE_OVERFLOW_WRAPS (type));
427 /* We want to canonicalize to positive real constants. Pretend
428 that only negative ones can be easily negated. */
429 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
432 return negate_expr_p (TREE_REALPART (t))
433 && negate_expr_p (TREE_IMAGPART (t));
436 return negate_expr_p (TREE_OPERAND (t, 0))
437 && negate_expr_p (TREE_OPERAND (t, 1));
440 return negate_expr_p (TREE_OPERAND (t, 0));
443 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
444 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
446 /* -(A + B) -> (-B) - A. */
447 if (negate_expr_p (TREE_OPERAND (t, 1))
448 && reorder_operands_p (TREE_OPERAND (t, 0),
449 TREE_OPERAND (t, 1)))
451 /* -(A + B) -> (-A) - B. */
452 return negate_expr_p (TREE_OPERAND (t, 0));
455 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
456 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
457 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
458 && reorder_operands_p (TREE_OPERAND (t, 0),
459 TREE_OPERAND (t, 1));
462 if (TYPE_UNSIGNED (TREE_TYPE (t)))
468 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
469 return negate_expr_p (TREE_OPERAND (t, 1))
470 || negate_expr_p (TREE_OPERAND (t, 0));
478 /* In general we can't negate A / B, because if A is INT_MIN and
479 B is 1, we may turn this into INT_MIN / -1 which is undefined
480 and actually traps on some architectures. But if overflow is
481 undefined, we can negate, because - (INT_MIN / 1) is an
483 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
484 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
486 return negate_expr_p (TREE_OPERAND (t, 1))
487 || negate_expr_p (TREE_OPERAND (t, 0));
490 /* Negate -((double)float) as (double)(-float). */
491 if (TREE_CODE (type) == REAL_TYPE)
493 tree tem = strip_float_extensions (t);
495 return negate_expr_p (tem);
500 /* Negate -f(x) as f(-x). */
501 if (negate_mathfn_p (builtin_mathfn_code (t)))
502 return negate_expr_p (CALL_EXPR_ARG (t, 0));
506 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
507 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
509 tree op1 = TREE_OPERAND (t, 1);
510 if (TREE_INT_CST_HIGH (op1) == 0
511 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
512 == TREE_INT_CST_LOW (op1))
523 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
524 simplification is possible.
525 If negate_expr_p would return true for T, NULL_TREE will never be
529 fold_negate_expr (location_t loc, tree t)
531 tree type = TREE_TYPE (t);
534 switch (TREE_CODE (t))
536 /* Convert - (~A) to A + 1. */
538 if (INTEGRAL_TYPE_P (type))
539 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
540 build_int_cst (type, 1));
544 tem = fold_negate_const (t, type);
545 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
546 || !TYPE_OVERFLOW_TRAPS (type))
551 tem = fold_negate_const (t, type);
552 /* Two's complement FP formats, such as c4x, may overflow. */
553 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
558 tem = fold_negate_const (t, type);
563 tree rpart = negate_expr (TREE_REALPART (t));
564 tree ipart = negate_expr (TREE_IMAGPART (t));
566 if ((TREE_CODE (rpart) == REAL_CST
567 && TREE_CODE (ipart) == REAL_CST)
568 || (TREE_CODE (rpart) == INTEGER_CST
569 && TREE_CODE (ipart) == INTEGER_CST))
570 return build_complex (type, rpart, ipart);
575 if (negate_expr_p (t))
576 return fold_build2_loc (loc, COMPLEX_EXPR, type,
577 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
578 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
582 if (negate_expr_p (t))
583 return fold_build1_loc (loc, CONJ_EXPR, type,
584 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
588 return TREE_OPERAND (t, 0);
591 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
592 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
594 /* -(A + B) -> (-B) - A. */
595 if (negate_expr_p (TREE_OPERAND (t, 1))
596 && reorder_operands_p (TREE_OPERAND (t, 0),
597 TREE_OPERAND (t, 1)))
599 tem = negate_expr (TREE_OPERAND (t, 1));
600 return fold_build2_loc (loc, MINUS_EXPR, type,
601 tem, TREE_OPERAND (t, 0));
604 /* -(A + B) -> (-A) - B. */
605 if (negate_expr_p (TREE_OPERAND (t, 0)))
607 tem = negate_expr (TREE_OPERAND (t, 0));
608 return fold_build2_loc (loc, MINUS_EXPR, type,
609 tem, TREE_OPERAND (t, 1));
615 /* - (A - B) -> B - A */
616 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
617 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
618 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
619 return fold_build2_loc (loc, MINUS_EXPR, type,
620 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
624 if (TYPE_UNSIGNED (type))
630 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
632 tem = TREE_OPERAND (t, 1);
633 if (negate_expr_p (tem))
634 return fold_build2_loc (loc, TREE_CODE (t), type,
635 TREE_OPERAND (t, 0), negate_expr (tem));
636 tem = TREE_OPERAND (t, 0);
637 if (negate_expr_p (tem))
638 return fold_build2_loc (loc, TREE_CODE (t), type,
639 negate_expr (tem), TREE_OPERAND (t, 1));
648 /* In general we can't negate A / B, because if A is INT_MIN and
649 B is 1, we may turn this into INT_MIN / -1 which is undefined
650 and actually traps on some architectures. But if overflow is
651 undefined, we can negate, because - (INT_MIN / 1) is an
653 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
655 const char * const warnmsg = G_("assuming signed overflow does not "
656 "occur when negating a division");
657 tem = TREE_OPERAND (t, 1);
658 if (negate_expr_p (tem))
660 if (INTEGRAL_TYPE_P (type)
661 && (TREE_CODE (tem) != INTEGER_CST
662 || integer_onep (tem)))
663 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
664 return fold_build2_loc (loc, TREE_CODE (t), type,
665 TREE_OPERAND (t, 0), negate_expr (tem));
667 tem = TREE_OPERAND (t, 0);
668 if (negate_expr_p (tem))
670 if (INTEGRAL_TYPE_P (type)
671 && (TREE_CODE (tem) != INTEGER_CST
672 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
673 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
674 return fold_build2_loc (loc, TREE_CODE (t), type,
675 negate_expr (tem), TREE_OPERAND (t, 1));
681 /* Convert -((double)float) into (double)(-float). */
682 if (TREE_CODE (type) == REAL_TYPE)
684 tem = strip_float_extensions (t);
685 if (tem != t && negate_expr_p (tem))
686 return fold_convert_loc (loc, type, negate_expr (tem));
691 /* Negate -f(x) as f(-x). */
692 if (negate_mathfn_p (builtin_mathfn_code (t))
693 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
697 fndecl = get_callee_fndecl (t);
698 arg = negate_expr (CALL_EXPR_ARG (t, 0));
699 return build_call_expr_loc (loc, fndecl, 1, arg);
704 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
705 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
707 tree op1 = TREE_OPERAND (t, 1);
708 if (TREE_INT_CST_HIGH (op1) == 0
709 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
710 == TREE_INT_CST_LOW (op1))
712 tree ntype = TYPE_UNSIGNED (type)
713 ? signed_type_for (type)
714 : unsigned_type_for (type);
715 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
716 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
717 return fold_convert_loc (loc, type, temp);
729 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
730 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
742 loc = EXPR_LOCATION (t);
743 type = TREE_TYPE (t);
746 tem = fold_negate_expr (loc, t);
748 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
749 return fold_convert_loc (loc, type, tem);
752 /* Split a tree IN into a constant, literal and variable parts that could be
753 combined with CODE to make IN. "constant" means an expression with
754 TREE_CONSTANT but that isn't an actual constant. CODE must be a
755 commutative arithmetic operation. Store the constant part into *CONP,
756 the literal in *LITP and return the variable part. If a part isn't
757 present, set it to null. If the tree does not decompose in this way,
758 return the entire tree as the variable part and the other parts as null.
760 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
761 case, we negate an operand that was subtracted. Except if it is a
762 literal for which we use *MINUS_LITP instead.
764 If NEGATE_P is true, we are negating all of IN, again except a literal
765 for which we use *MINUS_LITP instead.
767 If IN is itself a literal or constant, return it as appropriate.
769 Note that we do not guarantee that any of the three values will be the
770 same type as IN, but they will have the same signedness and mode. */
773 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
774 tree *minus_litp, int negate_p)
782 /* Strip any conversions that don't change the machine mode or signedness. */
783 STRIP_SIGN_NOPS (in);
785 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
786 || TREE_CODE (in) == FIXED_CST)
788 else if (TREE_CODE (in) == code
789 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
790 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
791 /* We can associate addition and subtraction together (even
792 though the C standard doesn't say so) for integers because
793 the value is not affected. For reals, the value might be
794 affected, so we can't. */
795 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
796 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
798 tree op0 = TREE_OPERAND (in, 0);
799 tree op1 = TREE_OPERAND (in, 1);
800 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
801 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
803 /* First see if either of the operands is a literal, then a constant. */
804 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
805 || TREE_CODE (op0) == FIXED_CST)
806 *litp = op0, op0 = 0;
807 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
808 || TREE_CODE (op1) == FIXED_CST)
809 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
811 if (op0 != 0 && TREE_CONSTANT (op0))
812 *conp = op0, op0 = 0;
813 else if (op1 != 0 && TREE_CONSTANT (op1))
814 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
816 /* If we haven't dealt with either operand, this is not a case we can
817 decompose. Otherwise, VAR is either of the ones remaining, if any. */
818 if (op0 != 0 && op1 != 0)
823 var = op1, neg_var_p = neg1_p;
825 /* Now do any needed negations. */
827 *minus_litp = *litp, *litp = 0;
829 *conp = negate_expr (*conp);
831 var = negate_expr (var);
833 else if (TREE_CONSTANT (in))
841 *minus_litp = *litp, *litp = 0;
842 else if (*minus_litp)
843 *litp = *minus_litp, *minus_litp = 0;
844 *conp = negate_expr (*conp);
845 var = negate_expr (var);
851 /* Re-associate trees split by the above function. T1 and T2 are
852 either expressions to associate or null. Return the new
853 expression, if any. LOC is the location of the new expression. If
854 we build an operation, do it in TYPE and with CODE. */
857 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
864 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
865 try to fold this since we will have infinite recursion. But do
866 deal with any NEGATE_EXPRs. */
867 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
868 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
870 if (code == PLUS_EXPR)
872 if (TREE_CODE (t1) == NEGATE_EXPR)
873 return build2_loc (loc, MINUS_EXPR, type,
874 fold_convert_loc (loc, type, t2),
875 fold_convert_loc (loc, type,
876 TREE_OPERAND (t1, 0)));
877 else if (TREE_CODE (t2) == NEGATE_EXPR)
878 return build2_loc (loc, MINUS_EXPR, type,
879 fold_convert_loc (loc, type, t1),
880 fold_convert_loc (loc, type,
881 TREE_OPERAND (t2, 0)));
882 else if (integer_zerop (t2))
883 return fold_convert_loc (loc, type, t1);
885 else if (code == MINUS_EXPR)
887 if (integer_zerop (t2))
888 return fold_convert_loc (loc, type, t1);
891 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
892 fold_convert_loc (loc, type, t2));
895 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
896 fold_convert_loc (loc, type, t2));
899 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
900 for use in int_const_binop, size_binop and size_diffop. */
903 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
905 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
907 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
922 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
923 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
924 && TYPE_MODE (type1) == TYPE_MODE (type2);
928 /* Combine two integer constants ARG1 and ARG2 under operation CODE
929 to produce a new constant. Return NULL_TREE if we don't know how
930 to evaluate CODE at compile-time.
932 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
935 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
937 double_int op1, op2, res, tmp;
939 tree type = TREE_TYPE (arg1);
940 bool uns = TYPE_UNSIGNED (type);
942 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
943 bool overflow = false;
945 op1 = tree_to_double_int (arg1);
946 op2 = tree_to_double_int (arg2);
951 res = double_int_ior (op1, op2);
955 res = double_int_xor (op1, op2);
959 res = double_int_and (op1, op2);
963 res = double_int_rshift (op1, double_int_to_shwi (op2),
964 TYPE_PRECISION (type), !uns);
968 /* It's unclear from the C standard whether shifts can overflow.
969 The following code ignores overflow; perhaps a C standard
970 interpretation ruling is needed. */
971 res = double_int_lshift (op1, double_int_to_shwi (op2),
972 TYPE_PRECISION (type), !uns);
976 res = double_int_rrotate (op1, double_int_to_shwi (op2),
977 TYPE_PRECISION (type));
981 res = double_int_lrotate (op1, double_int_to_shwi (op2),
982 TYPE_PRECISION (type));
986 overflow = add_double (op1.low, op1.high, op2.low, op2.high,
987 &res.low, &res.high);
991 neg_double (op2.low, op2.high, &res.low, &res.high);
992 add_double (op1.low, op1.high, res.low, res.high,
993 &res.low, &res.high);
994 overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
998 overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
999 &res.low, &res.high);
1002 case TRUNC_DIV_EXPR:
1003 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1004 case EXACT_DIV_EXPR:
1005 /* This is a shortcut for a common special case. */
1006 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1007 && !TREE_OVERFLOW (arg1)
1008 && !TREE_OVERFLOW (arg2)
1009 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1011 if (code == CEIL_DIV_EXPR)
1012 op1.low += op2.low - 1;
1014 res.low = op1.low / op2.low, res.high = 0;
1018 /* ... fall through ... */
1020 case ROUND_DIV_EXPR:
1021 if (double_int_zero_p (op2))
1023 if (double_int_one_p (op2))
1028 if (double_int_equal_p (op1, op2)
1029 && ! double_int_zero_p (op1))
1031 res = double_int_one;
1034 overflow = div_and_round_double (code, uns,
1035 op1.low, op1.high, op2.low, op2.high,
1036 &res.low, &res.high,
1037 &tmp.low, &tmp.high);
1040 case TRUNC_MOD_EXPR:
1041 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1042 /* This is a shortcut for a common special case. */
1043 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1044 && !TREE_OVERFLOW (arg1)
1045 && !TREE_OVERFLOW (arg2)
1046 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1048 if (code == CEIL_MOD_EXPR)
1049 op1.low += op2.low - 1;
1050 res.low = op1.low % op2.low, res.high = 0;
1054 /* ... fall through ... */
1056 case ROUND_MOD_EXPR:
1057 if (double_int_zero_p (op2))
1059 overflow = div_and_round_double (code, uns,
1060 op1.low, op1.high, op2.low, op2.high,
1061 &tmp.low, &tmp.high,
1062 &res.low, &res.high);
1066 res = double_int_min (op1, op2, uns);
1070 res = double_int_max (op1, op2, uns);
1079 t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);
1081 /* Propagate overflow flags ourselves. */
1082 if (((!uns || is_sizetype) && overflow)
1083 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1086 TREE_OVERFLOW (t) = 1;
1090 t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
1091 ((!uns || is_sizetype) && overflow)
1092 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1097 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1098 constant. We assume ARG1 and ARG2 have the same data type, or at least
1099 are the same kind of constant and the same machine mode. Return zero if
1100 combining the constants is not allowed in the current operating mode. */
1103 const_binop (enum tree_code code, tree arg1, tree arg2)
1105 /* Sanity check for the recursive cases. */
1112 if (TREE_CODE (arg1) == INTEGER_CST)
1113 return int_const_binop (code, arg1, arg2, 0);
1115 if (TREE_CODE (arg1) == REAL_CST)
1117 enum machine_mode mode;
1120 REAL_VALUE_TYPE value;
1121 REAL_VALUE_TYPE result;
1125 /* The following codes are handled by real_arithmetic. */
1140 d1 = TREE_REAL_CST (arg1);
1141 d2 = TREE_REAL_CST (arg2);
1143 type = TREE_TYPE (arg1);
1144 mode = TYPE_MODE (type);
1146 /* Don't perform operation if we honor signaling NaNs and
1147 either operand is a NaN. */
1148 if (HONOR_SNANS (mode)
1149 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1152 /* Don't perform operation if it would raise a division
1153 by zero exception. */
1154 if (code == RDIV_EXPR
1155 && REAL_VALUES_EQUAL (d2, dconst0)
1156 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1159 /* If either operand is a NaN, just return it. Otherwise, set up
1160 for floating-point trap; we return an overflow. */
1161 if (REAL_VALUE_ISNAN (d1))
1163 else if (REAL_VALUE_ISNAN (d2))
1166 inexact = real_arithmetic (&value, code, &d1, &d2);
1167 real_convert (&result, mode, &value);
1169 /* Don't constant fold this floating point operation if
1170 the result has overflowed and flag_trapping_math. */
1171 if (flag_trapping_math
1172 && MODE_HAS_INFINITIES (mode)
1173 && REAL_VALUE_ISINF (result)
1174 && !REAL_VALUE_ISINF (d1)
1175 && !REAL_VALUE_ISINF (d2))
1178 /* Don't constant fold this floating point operation if the
1179 result may dependent upon the run-time rounding mode and
1180 flag_rounding_math is set, or if GCC's software emulation
1181 is unable to accurately represent the result. */
1182 if ((flag_rounding_math
1183 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1184 && (inexact || !real_identical (&result, &value)))
1187 t = build_real (type, result);
1189 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1193 if (TREE_CODE (arg1) == FIXED_CST)
1195 FIXED_VALUE_TYPE f1;
1196 FIXED_VALUE_TYPE f2;
1197 FIXED_VALUE_TYPE result;
1202 /* The following codes are handled by fixed_arithmetic. */
1208 case TRUNC_DIV_EXPR:
1209 f2 = TREE_FIXED_CST (arg2);
1214 f2.data.high = TREE_INT_CST_HIGH (arg2);
1215 f2.data.low = TREE_INT_CST_LOW (arg2);
1223 f1 = TREE_FIXED_CST (arg1);
1224 type = TREE_TYPE (arg1);
1225 sat_p = TYPE_SATURATING (type);
1226 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1227 t = build_fixed (type, result);
1228 /* Propagate overflow flags. */
1229 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1230 TREE_OVERFLOW (t) = 1;
1234 if (TREE_CODE (arg1) == COMPLEX_CST)
1236 tree type = TREE_TYPE (arg1);
1237 tree r1 = TREE_REALPART (arg1);
1238 tree i1 = TREE_IMAGPART (arg1);
1239 tree r2 = TREE_REALPART (arg2);
1240 tree i2 = TREE_IMAGPART (arg2);
1247 real = const_binop (code, r1, r2);
1248 imag = const_binop (code, i1, i2);
1252 if (COMPLEX_FLOAT_TYPE_P (type))
1253 return do_mpc_arg2 (arg1, arg2, type,
1254 /* do_nonfinite= */ folding_initializer,
1257 real = const_binop (MINUS_EXPR,
1258 const_binop (MULT_EXPR, r1, r2),
1259 const_binop (MULT_EXPR, i1, i2));
1260 imag = const_binop (PLUS_EXPR,
1261 const_binop (MULT_EXPR, r1, i2),
1262 const_binop (MULT_EXPR, i1, r2));
1266 if (COMPLEX_FLOAT_TYPE_P (type))
1267 return do_mpc_arg2 (arg1, arg2, type,
1268 /* do_nonfinite= */ folding_initializer,
1271 case TRUNC_DIV_EXPR:
1273 case FLOOR_DIV_EXPR:
1274 case ROUND_DIV_EXPR:
1275 if (flag_complex_method == 0)
1277 /* Keep this algorithm in sync with
1278 tree-complex.c:expand_complex_div_straight().
1280 Expand complex division to scalars, straightforward algorithm.
1281 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1285 = const_binop (PLUS_EXPR,
1286 const_binop (MULT_EXPR, r2, r2),
1287 const_binop (MULT_EXPR, i2, i2));
1289 = const_binop (PLUS_EXPR,
1290 const_binop (MULT_EXPR, r1, r2),
1291 const_binop (MULT_EXPR, i1, i2));
1293 = const_binop (MINUS_EXPR,
1294 const_binop (MULT_EXPR, i1, r2),
1295 const_binop (MULT_EXPR, r1, i2));
1297 real = const_binop (code, t1, magsquared);
1298 imag = const_binop (code, t2, magsquared);
1302 /* Keep this algorithm in sync with
1303 tree-complex.c:expand_complex_div_wide().
1305 Expand complex division to scalars, modified algorithm to minimize
1306 overflow with wide input ranges. */
1307 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1308 fold_abs_const (r2, TREE_TYPE (type)),
1309 fold_abs_const (i2, TREE_TYPE (type)));
1311 if (integer_nonzerop (compare))
1313 /* In the TRUE branch, we compute
1315 div = (br * ratio) + bi;
1316 tr = (ar * ratio) + ai;
1317 ti = (ai * ratio) - ar;
1320 tree ratio = const_binop (code, r2, i2);
1321 tree div = const_binop (PLUS_EXPR, i2,
1322 const_binop (MULT_EXPR, r2, ratio));
1323 real = const_binop (MULT_EXPR, r1, ratio);
1324 real = const_binop (PLUS_EXPR, real, i1);
1325 real = const_binop (code, real, div);
1327 imag = const_binop (MULT_EXPR, i1, ratio);
1328 imag = const_binop (MINUS_EXPR, imag, r1);
1329 imag = const_binop (code, imag, div);
1333 /* In the FALSE branch, we compute
1335 divisor = (d * ratio) + c;
1336 tr = (b * ratio) + a;
1337 ti = b - (a * ratio);
1340 tree ratio = const_binop (code, i2, r2);
1341 tree div = const_binop (PLUS_EXPR, r2,
1342 const_binop (MULT_EXPR, i2, ratio));
1344 real = const_binop (MULT_EXPR, i1, ratio);
1345 real = const_binop (PLUS_EXPR, real, r1);
1346 real = const_binop (code, real, div);
1348 imag = const_binop (MULT_EXPR, r1, ratio);
1349 imag = const_binop (MINUS_EXPR, i1, imag);
1350 imag = const_binop (code, imag, div);
1360 return build_complex (type, real, imag);
1363 if (TREE_CODE (arg1) == VECTOR_CST)
1365 tree type = TREE_TYPE(arg1);
1366 int count = TYPE_VECTOR_SUBPARTS (type), i;
1367 tree elements1, elements2, list = NULL_TREE;
1369 if(TREE_CODE(arg2) != VECTOR_CST)
1372 elements1 = TREE_VECTOR_CST_ELTS (arg1);
1373 elements2 = TREE_VECTOR_CST_ELTS (arg2);
1375 for (i = 0; i < count; i++)
1377 tree elem1, elem2, elem;
1379 /* The trailing elements can be empty and should be treated as 0 */
1381 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1384 elem1 = TREE_VALUE(elements1);
1385 elements1 = TREE_CHAIN (elements1);
1389 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1392 elem2 = TREE_VALUE(elements2);
1393 elements2 = TREE_CHAIN (elements2);
1396 elem = const_binop (code, elem1, elem2);
1398 /* It is possible that const_binop cannot handle the given
1399 code and return NULL_TREE */
1400 if(elem == NULL_TREE)
1403 list = tree_cons (NULL_TREE, elem, list);
1405 return build_vector(type, nreverse(list));
1410 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1411 indicates which particular sizetype to create. */
1414 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1416 return build_int_cst (sizetype_tab[(int) kind], number);
1419 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1420 is a tree code. The type of the result is taken from the operands.
1421 Both must be equivalent integer types, ala int_binop_types_match_p.
1422 If the operands are constant, so is the result. */
1425 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1427 tree type = TREE_TYPE (arg0);
1429 if (arg0 == error_mark_node || arg1 == error_mark_node)
1430 return error_mark_node;
1432 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1435 /* Handle the special case of two integer constants faster. */
1436 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1438 /* And some specific cases even faster than that. */
1439 if (code == PLUS_EXPR)
1441 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1443 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1446 else if (code == MINUS_EXPR)
1448 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1451 else if (code == MULT_EXPR)
1453 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1457 /* Handle general case of two integer constants. */
1458 return int_const_binop (code, arg0, arg1, 0);
1461 return fold_build2_loc (loc, code, type, arg0, arg1);
1464 /* Given two values, either both of sizetype or both of bitsizetype,
1465 compute the difference between the two values. Return the value
1466 in signed type corresponding to the type of the operands. */
1469 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1471 tree type = TREE_TYPE (arg0);
1474 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1477 /* If the type is already signed, just do the simple thing. */
1478 if (!TYPE_UNSIGNED (type))
1479 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1481 if (type == sizetype)
1483 else if (type == bitsizetype)
1484 ctype = sbitsizetype;
1486 ctype = signed_type_for (type);
1488 /* If either operand is not a constant, do the conversions to the signed
1489 type and subtract. The hardware will do the right thing with any
1490 overflow in the subtraction. */
1491 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1492 return size_binop_loc (loc, MINUS_EXPR,
1493 fold_convert_loc (loc, ctype, arg0),
1494 fold_convert_loc (loc, ctype, arg1));
1496 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1497 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1498 overflow) and negate (which can't either). Special-case a result
1499 of zero while we're here. */
1500 if (tree_int_cst_equal (arg0, arg1))
1501 return build_int_cst (ctype, 0);
1502 else if (tree_int_cst_lt (arg1, arg0))
1503 return fold_convert_loc (loc, ctype,
1504 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1506 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1507 fold_convert_loc (loc, ctype,
1508 size_binop_loc (loc,
1513 /* A subroutine of fold_convert_const handling conversions of an
1514 INTEGER_CST to another integer type. */
1517 fold_convert_const_int_from_int (tree type, const_tree arg1)
1521 /* Given an integer constant, make new constant with new type,
1522 appropriately sign-extended or truncated. */
1523 t = force_fit_type_double (type, tree_to_double_int (arg1),
1524 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1525 (TREE_INT_CST_HIGH (arg1) < 0
1526 && (TYPE_UNSIGNED (type)
1527 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1528 | TREE_OVERFLOW (arg1));
1533 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1534 to an integer type. */
1537 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1542 /* The following code implements the floating point to integer
1543 conversion rules required by the Java Language Specification,
1544 that IEEE NaNs are mapped to zero and values that overflow
1545 the target precision saturate, i.e. values greater than
1546 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1547 are mapped to INT_MIN. These semantics are allowed by the
1548 C and C++ standards that simply state that the behavior of
1549 FP-to-integer conversion is unspecified upon overflow. */
1553 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1557 case FIX_TRUNC_EXPR:
1558 real_trunc (&r, VOIDmode, &x);
1565 /* If R is NaN, return zero and show we have an overflow. */
1566 if (REAL_VALUE_ISNAN (r))
1569 val = double_int_zero;
1572 /* See if R is less than the lower bound or greater than the
1577 tree lt = TYPE_MIN_VALUE (type);
1578 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1579 if (REAL_VALUES_LESS (r, l))
1582 val = tree_to_double_int (lt);
1588 tree ut = TYPE_MAX_VALUE (type);
1591 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1592 if (REAL_VALUES_LESS (u, r))
1595 val = tree_to_double_int (ut);
1601 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1603 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1607 /* A subroutine of fold_convert_const handling conversions of a
1608 FIXED_CST to an integer type. */
1611 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1614 double_int temp, temp_trunc;
1617 /* Right shift FIXED_CST to temp by fbit. */
1618 temp = TREE_FIXED_CST (arg1).data;
1619 mode = TREE_FIXED_CST (arg1).mode;
1620 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
1622 temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
1623 HOST_BITS_PER_DOUBLE_INT,
1624 SIGNED_FIXED_POINT_MODE_P (mode));
1626 /* Left shift temp to temp_trunc by fbit. */
1627 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
1628 HOST_BITS_PER_DOUBLE_INT,
1629 SIGNED_FIXED_POINT_MODE_P (mode));
1633 temp = double_int_zero;
1634 temp_trunc = double_int_zero;
1637 /* If FIXED_CST is negative, we need to round the value toward 0.
1638 By checking if the fractional bits are not zero to add 1 to temp. */
1639 if (SIGNED_FIXED_POINT_MODE_P (mode)
1640 && double_int_negative_p (temp_trunc)
1641 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
1642 temp = double_int_add (temp, double_int_one);
1644 /* Given a fixed-point constant, make new constant with new type,
1645 appropriately sign-extended or truncated. */
1646 t = force_fit_type_double (type, temp, -1,
1647 (double_int_negative_p (temp)
1648 && (TYPE_UNSIGNED (type)
1649 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1650 | TREE_OVERFLOW (arg1));
1655 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1656 to another floating point type. */
1659 fold_convert_const_real_from_real (tree type, const_tree arg1)
1661 REAL_VALUE_TYPE value;
1664 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1665 t = build_real (type, value);
1667 /* If converting an infinity or NAN to a representation that doesn't
1668 have one, set the overflow bit so that we can produce some kind of
1669 error message at the appropriate point if necessary. It's not the
1670 most user-friendly message, but it's better than nothing. */
1671 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1672 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1673 TREE_OVERFLOW (t) = 1;
1674 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1675 && !MODE_HAS_NANS (TYPE_MODE (type)))
1676 TREE_OVERFLOW (t) = 1;
1677 /* Regular overflow, conversion produced an infinity in a mode that
1678 can't represent them. */
1679 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1680 && REAL_VALUE_ISINF (value)
1681 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1682 TREE_OVERFLOW (t) = 1;
1684 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1688 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1689 to a floating point type. */
1692 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1694 REAL_VALUE_TYPE value;
1697 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1698 t = build_real (type, value);
1700 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1704 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1705 to another fixed-point type. */
1708 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1710 FIXED_VALUE_TYPE value;
1714 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1715 TYPE_SATURATING (type));
1716 t = build_fixed (type, value);
1718 /* Propagate overflow flags. */
1719 if (overflow_p | TREE_OVERFLOW (arg1))
1720 TREE_OVERFLOW (t) = 1;
1724 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1725 to a fixed-point type. */
1728 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1730 FIXED_VALUE_TYPE value;
1734 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1735 TREE_INT_CST (arg1),
1736 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1737 TYPE_SATURATING (type));
1738 t = build_fixed (type, value);
1740 /* Propagate overflow flags. */
1741 if (overflow_p | TREE_OVERFLOW (arg1))
1742 TREE_OVERFLOW (t) = 1;
1746 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1747 to a fixed-point type. */
1750 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1752 FIXED_VALUE_TYPE value;
1756 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1757 &TREE_REAL_CST (arg1),
1758 TYPE_SATURATING (type));
1759 t = build_fixed (type, value);
1761 /* Propagate overflow flags. */
1762 if (overflow_p | TREE_OVERFLOW (arg1))
1763 TREE_OVERFLOW (t) = 1;
1767 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1768 type TYPE. If no simplification can be done return NULL_TREE. */
1771 fold_convert_const (enum tree_code code, tree type, tree arg1)
1773 if (TREE_TYPE (arg1) == type)
1776 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1777 || TREE_CODE (type) == OFFSET_TYPE)
1779 if (TREE_CODE (arg1) == INTEGER_CST)
1780 return fold_convert_const_int_from_int (type, arg1);
1781 else if (TREE_CODE (arg1) == REAL_CST)
1782 return fold_convert_const_int_from_real (code, type, arg1);
1783 else if (TREE_CODE (arg1) == FIXED_CST)
1784 return fold_convert_const_int_from_fixed (type, arg1);
1786 else if (TREE_CODE (type) == REAL_TYPE)
1788 if (TREE_CODE (arg1) == INTEGER_CST)
1789 return build_real_from_int_cst (type, arg1);
1790 else if (TREE_CODE (arg1) == REAL_CST)
1791 return fold_convert_const_real_from_real (type, arg1);
1792 else if (TREE_CODE (arg1) == FIXED_CST)
1793 return fold_convert_const_real_from_fixed (type, arg1);
1795 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1797 if (TREE_CODE (arg1) == FIXED_CST)
1798 return fold_convert_const_fixed_from_fixed (type, arg1);
1799 else if (TREE_CODE (arg1) == INTEGER_CST)
1800 return fold_convert_const_fixed_from_int (type, arg1);
1801 else if (TREE_CODE (arg1) == REAL_CST)
1802 return fold_convert_const_fixed_from_real (type, arg1);
1807 /* Construct a vector of zero elements of vector type TYPE. */
1810 build_zero_vector (tree type)
1814 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1815 return build_vector_from_val (type, t);
1818 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1821 fold_convertible_p (const_tree type, const_tree arg)
1823 tree orig = TREE_TYPE (arg);
1828 if (TREE_CODE (arg) == ERROR_MARK
1829 || TREE_CODE (type) == ERROR_MARK
1830 || TREE_CODE (orig) == ERROR_MARK)
1833 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1836 switch (TREE_CODE (type))
1838 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1839 case POINTER_TYPE: case REFERENCE_TYPE:
1841 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1842 || TREE_CODE (orig) == OFFSET_TYPE)
1844 return (TREE_CODE (orig) == VECTOR_TYPE
1845 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1848 case FIXED_POINT_TYPE:
1852 return TREE_CODE (type) == TREE_CODE (orig);
1859 /* Convert expression ARG to type TYPE. Used by the middle-end for
1860 simple conversions in preference to calling the front-end's convert. */
1863 fold_convert_loc (location_t loc, tree type, tree arg)
1865 tree orig = TREE_TYPE (arg);
1871 if (TREE_CODE (arg) == ERROR_MARK
1872 || TREE_CODE (type) == ERROR_MARK
1873 || TREE_CODE (orig) == ERROR_MARK)
1874 return error_mark_node;
1876 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1877 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1879 switch (TREE_CODE (type))
1882 case REFERENCE_TYPE:
1883 /* Handle conversions between pointers to different address spaces. */
1884 if (POINTER_TYPE_P (orig)
1885 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1886 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1887 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1890 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1892 if (TREE_CODE (arg) == INTEGER_CST)
1894 tem = fold_convert_const (NOP_EXPR, type, arg);
1895 if (tem != NULL_TREE)
1898 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1899 || TREE_CODE (orig) == OFFSET_TYPE)
1900 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1901 if (TREE_CODE (orig) == COMPLEX_TYPE)
1902 return fold_convert_loc (loc, type,
1903 fold_build1_loc (loc, REALPART_EXPR,
1904 TREE_TYPE (orig), arg));
1905 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1906 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1907 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1910 if (TREE_CODE (arg) == INTEGER_CST)
1912 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1913 if (tem != NULL_TREE)
1916 else if (TREE_CODE (arg) == REAL_CST)
1918 tem = fold_convert_const (NOP_EXPR, type, arg);
1919 if (tem != NULL_TREE)
1922 else if (TREE_CODE (arg) == FIXED_CST)
1924 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1925 if (tem != NULL_TREE)
1929 switch (TREE_CODE (orig))
1932 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1933 case POINTER_TYPE: case REFERENCE_TYPE:
1934 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1937 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1939 case FIXED_POINT_TYPE:
1940 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1943 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1944 return fold_convert_loc (loc, type, tem);
1950 case FIXED_POINT_TYPE:
1951 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1952 || TREE_CODE (arg) == REAL_CST)
1954 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1955 if (tem != NULL_TREE)
1956 goto fold_convert_exit;
1959 switch (TREE_CODE (orig))
1961 case FIXED_POINT_TYPE:
1966 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1969 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1970 return fold_convert_loc (loc, type, tem);
1977 switch (TREE_CODE (orig))
1980 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1981 case POINTER_TYPE: case REFERENCE_TYPE:
1983 case FIXED_POINT_TYPE:
1984 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1985 fold_convert_loc (loc, TREE_TYPE (type), arg),
1986 fold_convert_loc (loc, TREE_TYPE (type),
1987 integer_zero_node));
1992 if (TREE_CODE (arg) == COMPLEX_EXPR)
1994 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1995 TREE_OPERAND (arg, 0));
1996 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1997 TREE_OPERAND (arg, 1));
1998 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2001 arg = save_expr (arg);
2002 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2003 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2004 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2005 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2006 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2014 if (integer_zerop (arg))
2015 return build_zero_vector (type);
2016 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2017 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2018 || TREE_CODE (orig) == VECTOR_TYPE);
2019 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2022 tem = fold_ignored_result (arg);
2023 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2029 protected_set_expr_location_unshare (tem, loc);
2033 /* Return false if expr can be assumed not to be an lvalue, true
2037 maybe_lvalue_p (const_tree x)
2039 /* We only need to wrap lvalue tree codes. */
2040 switch (TREE_CODE (x))
2053 case ARRAY_RANGE_REF:
2059 case PREINCREMENT_EXPR:
2060 case PREDECREMENT_EXPR:
2062 case TRY_CATCH_EXPR:
2063 case WITH_CLEANUP_EXPR:
2072 /* Assume the worst for front-end tree codes. */
2073 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2081 /* Return an expr equal to X but certainly not valid as an lvalue. */
2084 non_lvalue_loc (location_t loc, tree x)
2086 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2091 if (! maybe_lvalue_p (x))
2093 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2096 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2097 Zero means allow extended lvalues. */
2099 int pedantic_lvalues;
2101 /* When pedantic, return an expr equal to X but certainly not valid as a
2102 pedantic lvalue. Otherwise, return X. */
2105 pedantic_non_lvalue_loc (location_t loc, tree x)
2107 if (pedantic_lvalues)
2108 return non_lvalue_loc (loc, x);
2110 return protected_set_expr_location_unshare (x, loc);
2113 /* Given a tree comparison code, return the code that is the logical inverse
2114 of the given code. It is not safe to do this for floating-point
2115 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2116 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2119 invert_tree_comparison (enum tree_code code, bool honor_nans)
2121 if (honor_nans && flag_trapping_math)
2131 return honor_nans ? UNLE_EXPR : LE_EXPR;
2133 return honor_nans ? UNLT_EXPR : LT_EXPR;
2135 return honor_nans ? UNGE_EXPR : GE_EXPR;
2137 return honor_nans ? UNGT_EXPR : GT_EXPR;
2151 return UNORDERED_EXPR;
2152 case UNORDERED_EXPR:
2153 return ORDERED_EXPR;
2159 /* Similar, but return the comparison that results if the operands are
2160 swapped. This is safe for floating-point. */
2163 swap_tree_comparison (enum tree_code code)
2170 case UNORDERED_EXPR:
2196 /* Convert a comparison tree code from an enum tree_code representation
2197 into a compcode bit-based encoding. This function is the inverse of
2198 compcode_to_comparison. */
2200 static enum comparison_code
2201 comparison_to_compcode (enum tree_code code)
2218 return COMPCODE_ORD;
2219 case UNORDERED_EXPR:
2220 return COMPCODE_UNORD;
2222 return COMPCODE_UNLT;
2224 return COMPCODE_UNEQ;
2226 return COMPCODE_UNLE;
2228 return COMPCODE_UNGT;
2230 return COMPCODE_LTGT;
2232 return COMPCODE_UNGE;
2238 /* Convert a compcode bit-based encoding of a comparison operator back
2239 to GCC's enum tree_code representation. This function is the
2240 inverse of comparison_to_compcode. */
2242 static enum tree_code
2243 compcode_to_comparison (enum comparison_code code)
2260 return ORDERED_EXPR;
2261 case COMPCODE_UNORD:
2262 return UNORDERED_EXPR;
2280 /* Return a tree for the comparison which is the combination of
2281 doing the AND or OR (depending on CODE) of the two operations LCODE
2282 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2283 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2284 if this makes the transformation invalid. */
2287 combine_comparisons (location_t loc,
2288 enum tree_code code, enum tree_code lcode,
2289 enum tree_code rcode, tree truth_type,
2290 tree ll_arg, tree lr_arg)
2292 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2293 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2294 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2299 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2300 compcode = lcompcode & rcompcode;
2303 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2304 compcode = lcompcode | rcompcode;
2313 /* Eliminate unordered comparisons, as well as LTGT and ORD
2314 which are not used unless the mode has NaNs. */
2315 compcode &= ~COMPCODE_UNORD;
2316 if (compcode == COMPCODE_LTGT)
2317 compcode = COMPCODE_NE;
2318 else if (compcode == COMPCODE_ORD)
2319 compcode = COMPCODE_TRUE;
2321 else if (flag_trapping_math)
2323 /* Check that the original operation and the optimized ones will trap
2324 under the same condition. */
2325 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2326 && (lcompcode != COMPCODE_EQ)
2327 && (lcompcode != COMPCODE_ORD);
2328 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2329 && (rcompcode != COMPCODE_EQ)
2330 && (rcompcode != COMPCODE_ORD);
2331 bool trap = (compcode & COMPCODE_UNORD) == 0
2332 && (compcode != COMPCODE_EQ)
2333 && (compcode != COMPCODE_ORD);
2335 /* In a short-circuited boolean expression the LHS might be
2336 such that the RHS, if evaluated, will never trap. For
2337 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2338 if neither x nor y is NaN. (This is a mixed blessing: for
2339 example, the expression above will never trap, hence
2340 optimizing it to x < y would be invalid). */
2341 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2342 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2345 /* If the comparison was short-circuited, and only the RHS
2346 trapped, we may now generate a spurious trap. */
2348 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2351 /* If we changed the conditions that cause a trap, we lose. */
2352 if ((ltrap || rtrap) != trap)
2356 if (compcode == COMPCODE_TRUE)
2357 return constant_boolean_node (true, truth_type);
2358 else if (compcode == COMPCODE_FALSE)
2359 return constant_boolean_node (false, truth_type);
2362 enum tree_code tcode;
2364 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2365 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2369 /* Return nonzero if two operands (typically of the same tree node)
2370 are necessarily equal. If either argument has side-effects this
2371 function returns zero. FLAGS modifies behavior as follows:
2373 If OEP_ONLY_CONST is set, only return nonzero for constants.
2374 This function tests whether the operands are indistinguishable;
2375 it does not test whether they are equal using C's == operation.
2376 The distinction is important for IEEE floating point, because
2377 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2378 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2380 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2381 even though it may hold multiple values during a function.
2382 This is because a GCC tree node guarantees that nothing else is
2383 executed between the evaluation of its "operands" (which may often
2384 be evaluated in arbitrary order). Hence if the operands themselves
2385 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2386 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2387 unset means assuming isochronic (or instantaneous) tree equivalence.
2388 Unless comparing arbitrary expression trees, such as from different
2389 statements, this flag can usually be left unset.
2391 If OEP_PURE_SAME is set, then pure functions with identical arguments
2392 are considered the same. It is used when the caller has other ways
2393 to ensure that global memory is unchanged in between. */
2396 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2398 /* If either is ERROR_MARK, they aren't equal. */
2399 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2400 || TREE_TYPE (arg0) == error_mark_node
2401 || TREE_TYPE (arg1) == error_mark_node)
2404 /* Similar, if either does not have a type (like a released SSA name),
2405 they aren't equal. */
2406 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2409 /* Check equality of integer constants before bailing out due to
2410 precision differences. */
2411 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2412 return tree_int_cst_equal (arg0, arg1);
2414 /* If both types don't have the same signedness, then we can't consider
2415 them equal. We must check this before the STRIP_NOPS calls
2416 because they may change the signedness of the arguments. As pointers
2417 strictly don't have a signedness, require either two pointers or
2418 two non-pointers as well. */
2419 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2420 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2423 /* We cannot consider pointers to different address space equal. */
2424 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2425 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2426 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2429 /* If both types don't have the same precision, then it is not safe
2431 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2437 /* In case both args are comparisons but with different comparison
2438 code, try to swap the comparison operands of one arg to produce
2439 a match and compare that variant. */
2440 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2441 && COMPARISON_CLASS_P (arg0)
2442 && COMPARISON_CLASS_P (arg1))
2444 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2446 if (TREE_CODE (arg0) == swap_code)
2447 return operand_equal_p (TREE_OPERAND (arg0, 0),
2448 TREE_OPERAND (arg1, 1), flags)
2449 && operand_equal_p (TREE_OPERAND (arg0, 1),
2450 TREE_OPERAND (arg1, 0), flags);
2453 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2454 /* This is needed for conversions and for COMPONENT_REF.
2455 Might as well play it safe and always test this. */
2456 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2457 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2458 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2461 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2462 We don't care about side effects in that case because the SAVE_EXPR
2463 takes care of that for us. In all other cases, two expressions are
2464 equal if they have no side effects. If we have two identical
2465 expressions with side effects that should be treated the same due
2466 to the only side effects being identical SAVE_EXPR's, that will
2467 be detected in the recursive calls below. */
2468 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2469 && (TREE_CODE (arg0) == SAVE_EXPR
2470 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2473 /* Next handle constant cases, those for which we can return 1 even
2474 if ONLY_CONST is set. */
2475 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2476 switch (TREE_CODE (arg0))
2479 return tree_int_cst_equal (arg0, arg1);
2482 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2483 TREE_FIXED_CST (arg1));
2486 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2487 TREE_REAL_CST (arg1)))
2491 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2493 /* If we do not distinguish between signed and unsigned zero,
2494 consider them equal. */
2495 if (real_zerop (arg0) && real_zerop (arg1))
2504 v1 = TREE_VECTOR_CST_ELTS (arg0);
2505 v2 = TREE_VECTOR_CST_ELTS (arg1);
2508 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2511 v1 = TREE_CHAIN (v1);
2512 v2 = TREE_CHAIN (v2);
2519 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2521 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2525 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2526 && ! memcmp (TREE_STRING_POINTER (arg0),
2527 TREE_STRING_POINTER (arg1),
2528 TREE_STRING_LENGTH (arg0)));
2531 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2537 if (flags & OEP_ONLY_CONST)
2540 /* Define macros to test an operand from arg0 and arg1 for equality and a
2541 variant that allows null and views null as being different from any
2542 non-null value. In the latter case, if either is null, the both
2543 must be; otherwise, do the normal comparison. */
2544 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2545 TREE_OPERAND (arg1, N), flags)
2547 #define OP_SAME_WITH_NULL(N) \
2548 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2549 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2551 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2554 /* Two conversions are equal only if signedness and modes match. */
2555 switch (TREE_CODE (arg0))
2558 case FIX_TRUNC_EXPR:
2559 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2560 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2570 case tcc_comparison:
2572 if (OP_SAME (0) && OP_SAME (1))
2575 /* For commutative ops, allow the other order. */
2576 return (commutative_tree_code (TREE_CODE (arg0))
2577 && operand_equal_p (TREE_OPERAND (arg0, 0),
2578 TREE_OPERAND (arg1, 1), flags)
2579 && operand_equal_p (TREE_OPERAND (arg0, 1),
2580 TREE_OPERAND (arg1, 0), flags));
2583 /* If either of the pointer (or reference) expressions we are
2584 dereferencing contain a side effect, these cannot be equal. */
2585 if (TREE_SIDE_EFFECTS (arg0)
2586 || TREE_SIDE_EFFECTS (arg1))
2589 switch (TREE_CODE (arg0))
2597 /* Require equal access sizes, and similar pointer types.
2598 We can have incomplete types for array references of
2599 variable-sized arrays from the Fortran frontent
2601 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2602 || (TYPE_SIZE (TREE_TYPE (arg0))
2603 && TYPE_SIZE (TREE_TYPE (arg1))
2604 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2605 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2606 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2607 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2608 && OP_SAME (0) && OP_SAME (1));
2611 case ARRAY_RANGE_REF:
2612 /* Operands 2 and 3 may be null.
2613 Compare the array index by value if it is constant first as we
2614 may have different types but same value here. */
2616 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2617 TREE_OPERAND (arg1, 1))
2619 && OP_SAME_WITH_NULL (2)
2620 && OP_SAME_WITH_NULL (3));
2623 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2624 may be NULL when we're called to compare MEM_EXPRs. */
2625 return OP_SAME_WITH_NULL (0)
2627 && OP_SAME_WITH_NULL (2);
2630 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2636 case tcc_expression:
2637 switch (TREE_CODE (arg0))
2640 case TRUTH_NOT_EXPR:
2643 case TRUTH_ANDIF_EXPR:
2644 case TRUTH_ORIF_EXPR:
2645 return OP_SAME (0) && OP_SAME (1);
2648 case WIDEN_MULT_PLUS_EXPR:
2649 case WIDEN_MULT_MINUS_EXPR:
2652 /* The multiplcation operands are commutative. */
2655 case TRUTH_AND_EXPR:
2657 case TRUTH_XOR_EXPR:
2658 if (OP_SAME (0) && OP_SAME (1))
2661 /* Otherwise take into account this is a commutative operation. */
2662 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2663 TREE_OPERAND (arg1, 1), flags)
2664 && operand_equal_p (TREE_OPERAND (arg0, 1),
2665 TREE_OPERAND (arg1, 0), flags));
2670 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2677 switch (TREE_CODE (arg0))
2680 /* If the CALL_EXPRs call different functions, then they
2681 clearly can not be equal. */
2682 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2687 unsigned int cef = call_expr_flags (arg0);
2688 if (flags & OEP_PURE_SAME)
2689 cef &= ECF_CONST | ECF_PURE;
2696 /* Now see if all the arguments are the same. */
2698 const_call_expr_arg_iterator iter0, iter1;
2700 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2701 a1 = first_const_call_expr_arg (arg1, &iter1);
2703 a0 = next_const_call_expr_arg (&iter0),
2704 a1 = next_const_call_expr_arg (&iter1))
2705 if (! operand_equal_p (a0, a1, flags))
2708 /* If we get here and both argument lists are exhausted
2709 then the CALL_EXPRs are equal. */
2710 return ! (a0 || a1);
2716 case tcc_declaration:
2717 /* Consider __builtin_sqrt equal to sqrt. */
2718 return (TREE_CODE (arg0) == FUNCTION_DECL
2719 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2720 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2721 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2728 #undef OP_SAME_WITH_NULL
2731 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2732 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2734 When in doubt, return 0. */
2737 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2739 int unsignedp1, unsignedpo;
2740 tree primarg0, primarg1, primother;
2741 unsigned int correct_width;
2743 if (operand_equal_p (arg0, arg1, 0))
2746 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2747 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2750 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2751 and see if the inner values are the same. This removes any
2752 signedness comparison, which doesn't matter here. */
2753 primarg0 = arg0, primarg1 = arg1;
2754 STRIP_NOPS (primarg0);
2755 STRIP_NOPS (primarg1);
2756 if (operand_equal_p (primarg0, primarg1, 0))
2759 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2760 actual comparison operand, ARG0.
2762 First throw away any conversions to wider types
2763 already present in the operands. */
2765 primarg1 = get_narrower (arg1, &unsignedp1);
2766 primother = get_narrower (other, &unsignedpo);
2768 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2769 if (unsignedp1 == unsignedpo
2770 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2771 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2773 tree type = TREE_TYPE (arg0);
2775 /* Make sure shorter operand is extended the right way
2776 to match the longer operand. */
2777 primarg1 = fold_convert (signed_or_unsigned_type_for
2778 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2780 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2787 /* See if ARG is an expression that is either a comparison or is performing
2788 arithmetic on comparisons. The comparisons must only be comparing
2789 two different values, which will be stored in *CVAL1 and *CVAL2; if
2790 they are nonzero it means that some operands have already been found.
2791 No variables may be used anywhere else in the expression except in the
2792 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2793 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2795 If this is true, return 1. Otherwise, return zero. */
2798 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2800 enum tree_code code = TREE_CODE (arg);
2801 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2803 /* We can handle some of the tcc_expression cases here. */
2804 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2806 else if (tclass == tcc_expression
2807 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2808 || code == COMPOUND_EXPR))
2809 tclass = tcc_binary;
2811 else if (tclass == tcc_expression && code == SAVE_EXPR
2812 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2814 /* If we've already found a CVAL1 or CVAL2, this expression is
2815 two complex to handle. */
2816 if (*cval1 || *cval2)
2826 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2829 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2830 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2831 cval1, cval2, save_p));
2836 case tcc_expression:
2837 if (code == COND_EXPR)
2838 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2839 cval1, cval2, save_p)
2840 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2841 cval1, cval2, save_p)
2842 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2843 cval1, cval2, save_p));
2846 case tcc_comparison:
2847 /* First see if we can handle the first operand, then the second. For
2848 the second operand, we know *CVAL1 can't be zero. It must be that
2849 one side of the comparison is each of the values; test for the
2850 case where this isn't true by failing if the two operands
2853 if (operand_equal_p (TREE_OPERAND (arg, 0),
2854 TREE_OPERAND (arg, 1), 0))
2858 *cval1 = TREE_OPERAND (arg, 0);
2859 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2861 else if (*cval2 == 0)
2862 *cval2 = TREE_OPERAND (arg, 0);
2863 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2868 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2870 else if (*cval2 == 0)
2871 *cval2 = TREE_OPERAND (arg, 1);
2872 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2884 /* ARG is a tree that is known to contain just arithmetic operations and
2885 comparisons. Evaluate the operations in the tree substituting NEW0 for
2886 any occurrence of OLD0 as an operand of a comparison and likewise for
2890 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2891 tree old1, tree new1)
2893 tree type = TREE_TYPE (arg);
2894 enum tree_code code = TREE_CODE (arg);
2895 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2897 /* We can handle some of the tcc_expression cases here. */
2898 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2900 else if (tclass == tcc_expression
2901 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2902 tclass = tcc_binary;
2907 return fold_build1_loc (loc, code, type,
2908 eval_subst (loc, TREE_OPERAND (arg, 0),
2909 old0, new0, old1, new1));
2912 return fold_build2_loc (loc, code, type,
2913 eval_subst (loc, TREE_OPERAND (arg, 0),
2914 old0, new0, old1, new1),
2915 eval_subst (loc, TREE_OPERAND (arg, 1),
2916 old0, new0, old1, new1));
2918 case tcc_expression:
2922 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2926 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2930 return fold_build3_loc (loc, code, type,
2931 eval_subst (loc, TREE_OPERAND (arg, 0),
2932 old0, new0, old1, new1),
2933 eval_subst (loc, TREE_OPERAND (arg, 1),
2934 old0, new0, old1, new1),
2935 eval_subst (loc, TREE_OPERAND (arg, 2),
2936 old0, new0, old1, new1));
2940 /* Fall through - ??? */
2942 case tcc_comparison:
2944 tree arg0 = TREE_OPERAND (arg, 0);
2945 tree arg1 = TREE_OPERAND (arg, 1);
2947 /* We need to check both for exact equality and tree equality. The
2948 former will be true if the operand has a side-effect. In that
2949 case, we know the operand occurred exactly once. */
2951 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2953 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2956 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2958 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2961 return fold_build2_loc (loc, code, type, arg0, arg1);
2969 /* Return a tree for the case when the result of an expression is RESULT
2970 converted to TYPE and OMITTED was previously an operand of the expression
2971 but is now not needed (e.g., we folded OMITTED * 0).
2973 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2974 the conversion of RESULT to TYPE. */
2977 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2979 tree t = fold_convert_loc (loc, type, result);
2981 /* If the resulting operand is an empty statement, just return the omitted
2982 statement casted to void. */
2983 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2984 return build1_loc (loc, NOP_EXPR, void_type_node,
2985 fold_ignored_result (omitted));
2987 if (TREE_SIDE_EFFECTS (omitted))
2988 return build2_loc (loc, COMPOUND_EXPR, type,
2989 fold_ignored_result (omitted), t);
2991 return non_lvalue_loc (loc, t);
2994 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2997 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3000 tree t = fold_convert_loc (loc, type, result);
3002 /* If the resulting operand is an empty statement, just return the omitted
3003 statement casted to void. */
3004 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3005 return build1_loc (loc, NOP_EXPR, void_type_node,
3006 fold_ignored_result (omitted));
3008 if (TREE_SIDE_EFFECTS (omitted))
3009 return build2_loc (loc, COMPOUND_EXPR, type,
3010 fold_ignored_result (omitted), t);
3012 return pedantic_non_lvalue_loc (loc, t);
3015 /* Return a tree for the case when the result of an expression is RESULT
3016 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3017 of the expression but are now not needed.
3019 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3020 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3021 evaluated before OMITTED2. Otherwise, if neither has side effects,
3022 just do the conversion of RESULT to TYPE. */
3025 omit_two_operands_loc (location_t loc, tree type, tree result,
3026 tree omitted1, tree omitted2)
3028 tree t = fold_convert_loc (loc, type, result);
3030 if (TREE_SIDE_EFFECTS (omitted2))
3031 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3032 if (TREE_SIDE_EFFECTS (omitted1))
3033 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3035 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3039 /* Return a simplified tree node for the truth-negation of ARG. This
3040 never alters ARG itself. We assume that ARG is an operation that
3041 returns a truth value (0 or 1).
3043 FIXME: one would think we would fold the result, but it causes
3044 problems with the dominator optimizer. */
3047 fold_truth_not_expr (location_t loc, tree arg)
3049 tree type = TREE_TYPE (arg);
3050 enum tree_code code = TREE_CODE (arg);
3051 location_t loc1, loc2;
3053 /* If this is a comparison, we can simply invert it, except for
3054 floating-point non-equality comparisons, in which case we just
3055 enclose a TRUTH_NOT_EXPR around what we have. */
3057 if (TREE_CODE_CLASS (code) == tcc_comparison)
3059 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3060 if (FLOAT_TYPE_P (op_type)
3061 && flag_trapping_math
3062 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3063 && code != NE_EXPR && code != EQ_EXPR)
3066 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3067 if (code == ERROR_MARK)
3070 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3071 TREE_OPERAND (arg, 1));
3077 return constant_boolean_node (integer_zerop (arg), type);
3079 case TRUTH_AND_EXPR:
3080 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3081 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3082 if (loc1 == UNKNOWN_LOCATION)
3084 if (loc2 == UNKNOWN_LOCATION)
3086 return build2_loc (loc, TRUTH_OR_EXPR, type,
3087 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3088 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3091 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3092 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3093 if (loc1 == UNKNOWN_LOCATION)
3095 if (loc2 == UNKNOWN_LOCATION)
3097 return build2_loc (loc, TRUTH_AND_EXPR, type,
3098 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3099 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3101 case TRUTH_XOR_EXPR:
3102 /* Here we can invert either operand. We invert the first operand
3103 unless the second operand is a TRUTH_NOT_EXPR in which case our
3104 result is the XOR of the first operand with the inside of the
3105 negation of the second operand. */
3107 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3108 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3109 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3111 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3112 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3113 TREE_OPERAND (arg, 1));
3115 case TRUTH_ANDIF_EXPR:
3116 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3117 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3118 if (loc1 == UNKNOWN_LOCATION)
3120 if (loc2 == UNKNOWN_LOCATION)
3122 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3123 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3124 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3126 case TRUTH_ORIF_EXPR:
3127 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3128 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3129 if (loc1 == UNKNOWN_LOCATION)
3131 if (loc2 == UNKNOWN_LOCATION)
3133 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3134 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3135 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3137 case TRUTH_NOT_EXPR:
3138 return TREE_OPERAND (arg, 0);
3142 tree arg1 = TREE_OPERAND (arg, 1);
3143 tree arg2 = TREE_OPERAND (arg, 2);
3145 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3146 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3147 if (loc1 == UNKNOWN_LOCATION)
3149 if (loc2 == UNKNOWN_LOCATION)
3152 /* A COND_EXPR may have a throw as one operand, which
3153 then has void type. Just leave void operands
3155 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3156 VOID_TYPE_P (TREE_TYPE (arg1))
3157 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3158 VOID_TYPE_P (TREE_TYPE (arg2))
3159 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3163 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3164 if (loc1 == UNKNOWN_LOCATION)
3166 return build2_loc (loc, COMPOUND_EXPR, type,
3167 TREE_OPERAND (arg, 0),
3168 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3170 case NON_LVALUE_EXPR:
3171 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3172 if (loc1 == UNKNOWN_LOCATION)
3174 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3177 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3178 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3180 /* ... fall through ... */
3183 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3184 if (loc1 == UNKNOWN_LOCATION)
3186 return build1_loc (loc, TREE_CODE (arg), type,
3187 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3190 if (!integer_onep (TREE_OPERAND (arg, 1)))
3192 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3195 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3197 case CLEANUP_POINT_EXPR:
3198 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3199 if (loc1 == UNKNOWN_LOCATION)
3201 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3202 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3209 /* Return a simplified tree node for the truth-negation of ARG. This
3210 never alters ARG itself. We assume that ARG is an operation that
3211 returns a truth value (0 or 1).
3213 FIXME: one would think we would fold the result, but it causes
3214 problems with the dominator optimizer. */
3217 invert_truthvalue_loc (location_t loc, tree arg)
3221 if (TREE_CODE (arg) == ERROR_MARK)
3224 tem = fold_truth_not_expr (loc, arg);
3226 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3231 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3232 operands are another bit-wise operation with a common input. If so,
3233 distribute the bit operations to save an operation and possibly two if
3234 constants are involved. For example, convert
3235 (A | B) & (A | C) into A | (B & C)
3236 Further simplification will occur if B and C are constants.
3238 If this optimization cannot be done, 0 will be returned. */
3241 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3242 tree arg0, tree arg1)
3247 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3248 || TREE_CODE (arg0) == code
3249 || (TREE_CODE (arg0) != BIT_AND_EXPR
3250 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3253 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3255 common = TREE_OPERAND (arg0, 0);
3256 left = TREE_OPERAND (arg0, 1);
3257 right = TREE_OPERAND (arg1, 1);
3259 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3261 common = TREE_OPERAND (arg0, 0);
3262 left = TREE_OPERAND (arg0, 1);
3263 right = TREE_OPERAND (arg1, 0);
3265 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3267 common = TREE_OPERAND (arg0, 1);
3268 left = TREE_OPERAND (arg0, 0);
3269 right = TREE_OPERAND (arg1, 1);
3271 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3273 common = TREE_OPERAND (arg0, 1);
3274 left = TREE_OPERAND (arg0, 0);
3275 right = TREE_OPERAND (arg1, 0);
3280 common = fold_convert_loc (loc, type, common);
3281 left = fold_convert_loc (loc, type, left);
3282 right = fold_convert_loc (loc, type, right);
3283 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3284 fold_build2_loc (loc, code, type, left, right));
3287 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3288 with code CODE. This optimization is unsafe. */
3290 distribute_real_division (location_t loc, enum tree_code code, tree type,
3291 tree arg0, tree arg1)
3293 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3294 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3296 /* (A / C) +- (B / C) -> (A +- B) / C. */
3298 && operand_equal_p (TREE_OPERAND (arg0, 1),
3299 TREE_OPERAND (arg1, 1), 0))
3300 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3301 fold_build2_loc (loc, code, type,
3302 TREE_OPERAND (arg0, 0),
3303 TREE_OPERAND (arg1, 0)),
3304 TREE_OPERAND (arg0, 1));
3306 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3307 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3308 TREE_OPERAND (arg1, 0), 0)
3309 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3310 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3312 REAL_VALUE_TYPE r0, r1;
3313 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3314 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3316 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3318 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3319 real_arithmetic (&r0, code, &r0, &r1);
3320 return fold_build2_loc (loc, MULT_EXPR, type,
3321 TREE_OPERAND (arg0, 0),
3322 build_real (type, r0));
3328 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3329 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3332 make_bit_field_ref (location_t loc, tree inner, tree type,
3333 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3335 tree result, bftype;
3339 tree size = TYPE_SIZE (TREE_TYPE (inner));
3340 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3341 || POINTER_TYPE_P (TREE_TYPE (inner)))
3342 && host_integerp (size, 0)
3343 && tree_low_cst (size, 0) == bitsize)
3344 return fold_convert_loc (loc, type, inner);
3348 if (TYPE_PRECISION (bftype) != bitsize
3349 || TYPE_UNSIGNED (bftype) == !unsignedp)
3350 bftype = build_nonstandard_integer_type (bitsize, 0);
3352 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3353 size_int (bitsize), bitsize_int (bitpos));
3356 result = fold_convert_loc (loc, type, result);
3361 /* Optimize a bit-field compare.
3363 There are two cases: First is a compare against a constant and the
3364 second is a comparison of two items where the fields are at the same
3365 bit position relative to the start of a chunk (byte, halfword, word)
3366 large enough to contain it. In these cases we can avoid the shift
3367 implicit in bitfield extractions.
3369 For constants, we emit a compare of the shifted constant with the
3370 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3371 compared. For two fields at the same position, we do the ANDs with the
3372 similar mask and compare the result of the ANDs.
3374 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3375 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3376 are the left and right operands of the comparison, respectively.
3378 If the optimization described above can be done, we return the resulting
3379 tree. Otherwise we return zero. */
3382 optimize_bit_field_compare (location_t loc, enum tree_code code,
3383 tree compare_type, tree lhs, tree rhs)
3385 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3386 tree type = TREE_TYPE (lhs);
3387 tree signed_type, unsigned_type;
3388 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3389 enum machine_mode lmode, rmode, nmode;
3390 int lunsignedp, runsignedp;
3391 int lvolatilep = 0, rvolatilep = 0;
3392 tree linner, rinner = NULL_TREE;
3396 /* Get all the information about the extractions being done. If the bit size
3397 if the same as the size of the underlying object, we aren't doing an
3398 extraction at all and so can do nothing. We also don't want to
3399 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3400 then will no longer be able to replace it. */
3401 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3402 &lunsignedp, &lvolatilep, false);
3403 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3404 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3409 /* If this is not a constant, we can only do something if bit positions,
3410 sizes, and signedness are the same. */
3411 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3412 &runsignedp, &rvolatilep, false);
3414 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3415 || lunsignedp != runsignedp || offset != 0
3416 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3420 /* See if we can find a mode to refer to this field. We should be able to,
3421 but fail if we can't. */
3423 && GET_MODE_BITSIZE (lmode) > 0
3424 && flag_strict_volatile_bitfields > 0)
3427 nmode = get_best_mode (lbitsize, lbitpos,
3428 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3429 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3430 TYPE_ALIGN (TREE_TYPE (rinner))),
3431 word_mode, lvolatilep || rvolatilep);
3432 if (nmode == VOIDmode)
3435 /* Set signed and unsigned types of the precision of this mode for the
3437 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3438 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3440 /* Compute the bit position and size for the new reference and our offset
3441 within it. If the new reference is the same size as the original, we
3442 won't optimize anything, so return zero. */
3443 nbitsize = GET_MODE_BITSIZE (nmode);
3444 nbitpos = lbitpos & ~ (nbitsize - 1);
3446 if (nbitsize == lbitsize)
3449 if (BYTES_BIG_ENDIAN)
3450 lbitpos = nbitsize - lbitsize - lbitpos;
3452 /* Make the mask to be used against the extracted field. */
3453 mask = build_int_cst_type (unsigned_type, -1);
3454 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3455 mask = const_binop (RSHIFT_EXPR, mask,
3456 size_int (nbitsize - lbitsize - lbitpos));
3459 /* If not comparing with constant, just rework the comparison
3461 return fold_build2_loc (loc, code, compare_type,
3462 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3463 make_bit_field_ref (loc, linner,
3468 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3469 make_bit_field_ref (loc, rinner,
3475 /* Otherwise, we are handling the constant case. See if the constant is too
3476 big for the field. Warn and return a tree of for 0 (false) if so. We do
3477 this not only for its own sake, but to avoid having to test for this
3478 error case below. If we didn't, we might generate wrong code.
3480 For unsigned fields, the constant shifted right by the field length should
3481 be all zero. For signed fields, the high-order bits should agree with
3486 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3487 fold_convert_loc (loc,
3488 unsigned_type, rhs),
3489 size_int (lbitsize))))
3491 warning (0, "comparison is always %d due to width of bit-field",
3493 return constant_boolean_node (code == NE_EXPR, compare_type);
3498 tree tem = const_binop (RSHIFT_EXPR,
3499 fold_convert_loc (loc, signed_type, rhs),
3500 size_int (lbitsize - 1));
3501 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3503 warning (0, "comparison is always %d due to width of bit-field",
3505 return constant_boolean_node (code == NE_EXPR, compare_type);
3509 /* Single-bit compares should always be against zero. */
3510 if (lbitsize == 1 && ! integer_zerop (rhs))
3512 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3513 rhs = build_int_cst (type, 0);
3516 /* Make a new bitfield reference, shift the constant over the
3517 appropriate number of bits and mask it with the computed mask
3518 (in case this was a signed field). If we changed it, make a new one. */
3519 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3522 TREE_SIDE_EFFECTS (lhs) = 1;
3523 TREE_THIS_VOLATILE (lhs) = 1;
3526 rhs = const_binop (BIT_AND_EXPR,
3527 const_binop (LSHIFT_EXPR,
3528 fold_convert_loc (loc, unsigned_type, rhs),
3529 size_int (lbitpos)),
3532 lhs = build2_loc (loc, code, compare_type,
3533 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3537 /* Subroutine for fold_truthop: decode a field reference.
3539 If EXP is a comparison reference, we return the innermost reference.
3541 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3542 set to the starting bit number.
3544 If the innermost field can be completely contained in a mode-sized
3545 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3547 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3548 otherwise it is not changed.
3550 *PUNSIGNEDP is set to the signedness of the field.
3552 *PMASK is set to the mask used. This is either contained in a
3553 BIT_AND_EXPR or derived from the width of the field.
3555 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3557 Return 0 if this is not a component reference or is one that we can't
3558 do anything with. */
3561 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3562 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3563 int *punsignedp, int *pvolatilep,
3564 tree *pmask, tree *pand_mask)
3566 tree outer_type = 0;
3568 tree mask, inner, offset;
3570 unsigned int precision;
3572 /* All the optimizations using this function assume integer fields.
3573 There are problems with FP fields since the type_for_size call
3574 below can fail for, e.g., XFmode. */
3575 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3578 /* We are interested in the bare arrangement of bits, so strip everything
3579 that doesn't affect the machine mode. However, record the type of the
3580 outermost expression if it may matter below. */
3581 if (CONVERT_EXPR_P (exp)
3582 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3583 outer_type = TREE_TYPE (exp);
3586 if (TREE_CODE (exp) == BIT_AND_EXPR)
3588 and_mask = TREE_OPERAND (exp, 1);
3589 exp = TREE_OPERAND (exp, 0);
3590 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3591 if (TREE_CODE (and_mask) != INTEGER_CST)
3595 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3596 punsignedp, pvolatilep, false);
3597 if ((inner == exp && and_mask == 0)
3598 || *pbitsize < 0 || offset != 0
3599 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3602 /* If the number of bits in the reference is the same as the bitsize of
3603 the outer type, then the outer type gives the signedness. Otherwise
3604 (in case of a small bitfield) the signedness is unchanged. */
3605 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3606 *punsignedp = TYPE_UNSIGNED (outer_type);
3608 /* Compute the mask to access the bitfield. */
3609 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3610 precision = TYPE_PRECISION (unsigned_type);
3612 mask = build_int_cst_type (unsigned_type, -1);
3614 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3615 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3617 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3619 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3620 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3623 *pand_mask = and_mask;
3627 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3631 all_ones_mask_p (const_tree mask, int size)
3633 tree type = TREE_TYPE (mask);
3634 unsigned int precision = TYPE_PRECISION (type);
3637 tmask = build_int_cst_type (signed_type_for (type), -1);
3640 tree_int_cst_equal (mask,
3641 const_binop (RSHIFT_EXPR,
3642 const_binop (LSHIFT_EXPR, tmask,
3643 size_int (precision - size)),
3644 size_int (precision - size)));
3647 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3648 represents the sign bit of EXP's type. If EXP represents a sign
3649 or zero extension, also test VAL against the unextended type.
3650 The return value is the (sub)expression whose sign bit is VAL,
3651 or NULL_TREE otherwise. */
3654 sign_bit_p (tree exp, const_tree val)
3656 unsigned HOST_WIDE_INT mask_lo, lo;
3657 HOST_WIDE_INT mask_hi, hi;
3661 /* Tree EXP must have an integral type. */
3662 t = TREE_TYPE (exp);
3663 if (! INTEGRAL_TYPE_P (t))
3666 /* Tree VAL must be an integer constant. */
3667 if (TREE_CODE (val) != INTEGER_CST
3668 || TREE_OVERFLOW (val))
3671 width = TYPE_PRECISION (t);
3672 if (width > HOST_BITS_PER_WIDE_INT)
3674 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3677 mask_hi = ((unsigned HOST_WIDE_INT) -1
3678 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3684 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3687 mask_lo = ((unsigned HOST_WIDE_INT) -1
3688 >> (HOST_BITS_PER_WIDE_INT - width));
3691 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3692 treat VAL as if it were unsigned. */
3693 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3694 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3697 /* Handle extension from a narrower type. */
3698 if (TREE_CODE (exp) == NOP_EXPR
3699 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3700 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3705 /* Subroutine for fold_truthop: determine if an operand is simple enough
3706 to be evaluated unconditionally. */
3709 simple_operand_p (const_tree exp)
3711 /* Strip any conversions that don't change the machine mode. */
3714 return (CONSTANT_CLASS_P (exp)
3715 || TREE_CODE (exp) == SSA_NAME
3717 && ! TREE_ADDRESSABLE (exp)
3718 && ! TREE_THIS_VOLATILE (exp)
3719 && ! DECL_NONLOCAL (exp)
3720 /* Don't regard global variables as simple. They may be
3721 allocated in ways unknown to the compiler (shared memory,
3722 #pragma weak, etc). */
3723 && ! TREE_PUBLIC (exp)
3724 && ! DECL_EXTERNAL (exp)
3725 /* Loading a static variable is unduly expensive, but global
3726 registers aren't expensive. */
3727 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3730 /* The following functions are subroutines to fold_range_test and allow it to
3731 try to change a logical combination of comparisons into a range test.
3734 X == 2 || X == 3 || X == 4 || X == 5
3738 (unsigned) (X - 2) <= 3
3740 We describe each set of comparisons as being either inside or outside
3741 a range, using a variable named like IN_P, and then describe the
3742 range with a lower and upper bound. If one of the bounds is omitted,
3743 it represents either the highest or lowest value of the type.
3745 In the comments below, we represent a range by two numbers in brackets
3746 preceded by a "+" to designate being inside that range, or a "-" to
3747 designate being outside that range, so the condition can be inverted by
3748 flipping the prefix. An omitted bound is represented by a "-". For
3749 example, "- [-, 10]" means being outside the range starting at the lowest
3750 possible value and ending at 10, in other words, being greater than 10.
3751 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3754 We set up things so that the missing bounds are handled in a consistent
3755 manner so neither a missing bound nor "true" and "false" need to be
3756 handled using a special case. */
3758 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3759 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3760 and UPPER1_P are nonzero if the respective argument is an upper bound
3761 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3762 must be specified for a comparison. ARG1 will be converted to ARG0's
3763 type if both are specified. */
3766 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3767 tree arg1, int upper1_p)
3773 /* If neither arg represents infinity, do the normal operation.
3774 Else, if not a comparison, return infinity. Else handle the special
3775 comparison rules. Note that most of the cases below won't occur, but
3776 are handled for consistency. */
3778 if (arg0 != 0 && arg1 != 0)
3780 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3781 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3783 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3786 if (TREE_CODE_CLASS (code) != tcc_comparison)
3789 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3790 for neither. In real maths, we cannot assume open ended ranges are
3791 the same. But, this is computer arithmetic, where numbers are finite.
3792 We can therefore make the transformation of any unbounded range with
3793 the value Z, Z being greater than any representable number. This permits
3794 us to treat unbounded ranges as equal. */
3795 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3796 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3800 result = sgn0 == sgn1;
3803 result = sgn0 != sgn1;
3806 result = sgn0 < sgn1;
3809 result = sgn0 <= sgn1;
3812 result = sgn0 > sgn1;
3815 result = sgn0 >= sgn1;
3821 return constant_boolean_node (result, type);
3824 /* Given EXP, a logical expression, set the range it is testing into
3825 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3826 actually being tested. *PLOW and *PHIGH will be made of the same
3827 type as the returned expression. If EXP is not a comparison, we
3828 will most likely not be returning a useful value and range. Set
3829 *STRICT_OVERFLOW_P to true if the return value is only valid
3830 because signed overflow is undefined; otherwise, do not change
3831 *STRICT_OVERFLOW_P. */
3834 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3835 bool *strict_overflow_p)
3837 enum tree_code code;
3838 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3839 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3841 tree low, high, n_low, n_high;
3842 location_t loc = EXPR_LOCATION (exp);
3844 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3845 and see if we can refine the range. Some of the cases below may not
3846 happen, but it doesn't seem worth worrying about this. We "continue"
3847 the outer loop when we've changed something; otherwise we "break"
3848 the switch, which will "break" the while. */
3851 low = high = build_int_cst (TREE_TYPE (exp), 0);
3855 code = TREE_CODE (exp);
3856 exp_type = TREE_TYPE (exp);
3858 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3860 if (TREE_OPERAND_LENGTH (exp) > 0)
3861 arg0 = TREE_OPERAND (exp, 0);
3862 if (TREE_CODE_CLASS (code) == tcc_comparison
3863 || TREE_CODE_CLASS (code) == tcc_unary
3864 || TREE_CODE_CLASS (code) == tcc_binary)
3865 arg0_type = TREE_TYPE (arg0);
3866 if (TREE_CODE_CLASS (code) == tcc_binary
3867 || TREE_CODE_CLASS (code) == tcc_comparison
3868 || (TREE_CODE_CLASS (code) == tcc_expression
3869 && TREE_OPERAND_LENGTH (exp) > 1))
3870 arg1 = TREE_OPERAND (exp, 1);
3875 case TRUTH_NOT_EXPR:
3876 in_p = ! in_p, exp = arg0;
3879 case EQ_EXPR: case NE_EXPR:
3880 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3881 /* We can only do something if the range is testing for zero
3882 and if the second operand is an integer constant. Note that
3883 saying something is "in" the range we make is done by
3884 complementing IN_P since it will set in the initial case of
3885 being not equal to zero; "out" is leaving it alone. */
3886 if (low == 0 || high == 0
3887 || ! integer_zerop (low) || ! integer_zerop (high)
3888 || TREE_CODE (arg1) != INTEGER_CST)
3893 case NE_EXPR: /* - [c, c] */
3896 case EQ_EXPR: /* + [c, c] */
3897 in_p = ! in_p, low = high = arg1;
3899 case GT_EXPR: /* - [-, c] */
3900 low = 0, high = arg1;
3902 case GE_EXPR: /* + [c, -] */
3903 in_p = ! in_p, low = arg1, high = 0;
3905 case LT_EXPR: /* - [c, -] */
3906 low = arg1, high = 0;
3908 case LE_EXPR: /* + [-, c] */
3909 in_p = ! in_p, low = 0, high = arg1;
3915 /* If this is an unsigned comparison, we also know that EXP is
3916 greater than or equal to zero. We base the range tests we make
3917 on that fact, so we record it here so we can parse existing
3918 range tests. We test arg0_type since often the return type
3919 of, e.g. EQ_EXPR, is boolean. */
3920 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3922 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3924 build_int_cst (arg0_type, 0),
3928 in_p = n_in_p, low = n_low, high = n_high;
3930 /* If the high bound is missing, but we have a nonzero low
3931 bound, reverse the range so it goes from zero to the low bound
3933 if (high == 0 && low && ! integer_zerop (low))
3936 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3937 integer_one_node, 0);
3938 low = build_int_cst (arg0_type, 0);
3946 /* (-x) IN [a,b] -> x in [-b, -a] */
3947 n_low = range_binop (MINUS_EXPR, exp_type,
3948 build_int_cst (exp_type, 0),
3950 n_high = range_binop (MINUS_EXPR, exp_type,
3951 build_int_cst (exp_type, 0),
3953 if (n_high != 0 && TREE_OVERFLOW (n_high))
3959 exp = build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3960 build_int_cst (exp_type, 1));
3963 case PLUS_EXPR: case MINUS_EXPR:
3964 if (TREE_CODE (arg1) != INTEGER_CST)
3967 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3968 move a constant to the other side. */
3969 if (!TYPE_UNSIGNED (arg0_type)
3970 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3973 /* If EXP is signed, any overflow in the computation is undefined,
3974 so we don't worry about it so long as our computations on
3975 the bounds don't overflow. For unsigned, overflow is defined
3976 and this is exactly the right thing. */
3977 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3978 arg0_type, low, 0, arg1, 0);
3979 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3980 arg0_type, high, 1, arg1, 0);
3981 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3982 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3985 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3986 *strict_overflow_p = true;
3989 /* Check for an unsigned range which has wrapped around the maximum
3990 value thus making n_high < n_low, and normalize it. */
3991 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3993 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3994 integer_one_node, 0);
3995 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3996 integer_one_node, 0);
3998 /* If the range is of the form +/- [ x+1, x ], we won't
3999 be able to normalize it. But then, it represents the
4000 whole range or the empty set, so make it
4002 if (tree_int_cst_equal (n_low, low)
4003 && tree_int_cst_equal (n_high, high))
4009 low = n_low, high = n_high;
4014 CASE_CONVERT: case NON_LVALUE_EXPR:
4015 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4018 if (! INTEGRAL_TYPE_P (arg0_type)
4019 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4020 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4023 n_low = low, n_high = high;
4026 n_low = fold_convert_loc (loc, arg0_type, n_low);
4029 n_high = fold_convert_loc (loc, arg0_type, n_high);
4032 /* If we're converting arg0 from an unsigned type, to exp,
4033 a signed type, we will be doing the comparison as unsigned.
4034 The tests above have already verified that LOW and HIGH
4037 So we have to ensure that we will handle large unsigned
4038 values the same way that the current signed bounds treat
4041 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4045 /* For fixed-point modes, we need to pass the saturating flag
4046 as the 2nd parameter. */
4047 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4048 equiv_type = lang_hooks.types.type_for_mode
4049 (TYPE_MODE (arg0_type),
4050 TYPE_SATURATING (arg0_type));
4052 equiv_type = lang_hooks.types.type_for_mode
4053 (TYPE_MODE (arg0_type), 1);
4055 /* A range without an upper bound is, naturally, unbounded.
4056 Since convert would have cropped a very large value, use
4057 the max value for the destination type. */
4059 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4060 : TYPE_MAX_VALUE (arg0_type);
4062 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4063 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4064 fold_convert_loc (loc, arg0_type,
4066 build_int_cst (arg0_type, 1));
4068 /* If the low bound is specified, "and" the range with the
4069 range for which the original unsigned value will be
4073 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4074 1, n_low, n_high, 1,
4075 fold_convert_loc (loc, arg0_type,
4080 in_p = (n_in_p == in_p);
4084 /* Otherwise, "or" the range with the range of the input
4085 that will be interpreted as negative. */
4086 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4087 0, n_low, n_high, 1,
4088 fold_convert_loc (loc, arg0_type,
4093 in_p = (in_p != n_in_p);
4098 low = n_low, high = n_high;
4108 /* If EXP is a constant, we can evaluate whether this is true or false. */
4109 if (TREE_CODE (exp) == INTEGER_CST)
4111 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4113 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4119 *pin_p = in_p, *plow = low, *phigh = high;
4123 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4124 type, TYPE, return an expression to test if EXP is in (or out of, depending
4125 on IN_P) the range. Return 0 if the test couldn't be created. */
4128 build_range_check (location_t loc, tree type, tree exp, int in_p,
4129 tree low, tree high)
4131 tree etype = TREE_TYPE (exp), value;
4133 #ifdef HAVE_canonicalize_funcptr_for_compare
4134 /* Disable this optimization for function pointer expressions
4135 on targets that require function pointer canonicalization. */
4136 if (HAVE_canonicalize_funcptr_for_compare
4137 && TREE_CODE (etype) == POINTER_TYPE
4138 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4144 value = build_range_check (loc, type, exp, 1, low, high);
4146 return invert_truthvalue_loc (loc, value);
4151 if (low == 0 && high == 0)
4152 return build_int_cst (type, 1);
4155 return fold_build2_loc (loc, LE_EXPR, type, exp,
4156 fold_convert_loc (loc, etype, high));
4159 return fold_build2_loc (loc, GE_EXPR, type, exp,
4160 fold_convert_loc (loc, etype, low));
4162 if (operand_equal_p (low, high, 0))
4163 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4164 fold_convert_loc (loc, etype, low));
4166 if (integer_zerop (low))
4168 if (! TYPE_UNSIGNED (etype))
4170 etype = unsigned_type_for (etype);
4171 high = fold_convert_loc (loc, etype, high);
4172 exp = fold_convert_loc (loc, etype, exp);
4174 return build_range_check (loc, type, exp, 1, 0, high);
4177 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4178 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4180 unsigned HOST_WIDE_INT lo;
4184 prec = TYPE_PRECISION (etype);
4185 if (prec <= HOST_BITS_PER_WIDE_INT)
4188 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4192 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4193 lo = (unsigned HOST_WIDE_INT) -1;
4196 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4198 if (TYPE_UNSIGNED (etype))
4200 tree signed_etype = signed_type_for (etype);
4201 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4203 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4205 etype = signed_etype;
4206 exp = fold_convert_loc (loc, etype, exp);
4208 return fold_build2_loc (loc, GT_EXPR, type, exp,
4209 build_int_cst (etype, 0));
4213 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4214 This requires wrap-around arithmetics for the type of the expression.
4215 First make sure that arithmetics in this type is valid, then make sure
4216 that it wraps around. */
4217 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4218 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4219 TYPE_UNSIGNED (etype));
4221 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4223 tree utype, minv, maxv;
4225 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4226 for the type in question, as we rely on this here. */
4227 utype = unsigned_type_for (etype);
4228 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4229 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4230 integer_one_node, 1);
4231 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4233 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4240 high = fold_convert_loc (loc, etype, high);
4241 low = fold_convert_loc (loc, etype, low);
4242 exp = fold_convert_loc (loc, etype, exp);
4244 value = const_binop (MINUS_EXPR, high, low);
4247 if (POINTER_TYPE_P (etype))
4249 if (value != 0 && !TREE_OVERFLOW (value))
4251 low = fold_convert_loc (loc, sizetype, low);
4252 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4253 return build_range_check (loc, type,
4254 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4256 1, build_int_cst (etype, 0), value);
4261 if (value != 0 && !TREE_OVERFLOW (value))
4262 return build_range_check (loc, type,
4263 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4264 1, build_int_cst (etype, 0), value);
4269 /* Return the predecessor of VAL in its type, handling the infinite case. */
4272 range_predecessor (tree val)
4274 tree type = TREE_TYPE (val);
4276 if (INTEGRAL_TYPE_P (type)
4277 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4280 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4283 /* Return the successor of VAL in its type, handling the infinite case. */
4286 range_successor (tree val)
4288 tree type = TREE_TYPE (val);
4290 if (INTEGRAL_TYPE_P (type)
4291 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4294 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4297 /* Given two ranges, see if we can merge them into one. Return 1 if we
4298 can, 0 if we can't. Set the output range into the specified parameters. */
4301 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4302 tree high0, int in1_p, tree low1, tree high1)
4310 int lowequal = ((low0 == 0 && low1 == 0)
4311 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4312 low0, 0, low1, 0)));
4313 int highequal = ((high0 == 0 && high1 == 0)
4314 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4315 high0, 1, high1, 1)));
4317 /* Make range 0 be the range that starts first, or ends last if they
4318 start at the same value. Swap them if it isn't. */
4319 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4322 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4323 high1, 1, high0, 1))))
4325 temp = in0_p, in0_p = in1_p, in1_p = temp;
4326 tem = low0, low0 = low1, low1 = tem;
4327 tem = high0, high0 = high1, high1 = tem;
4330 /* Now flag two cases, whether the ranges are disjoint or whether the
4331 second range is totally subsumed in the first. Note that the tests
4332 below are simplified by the ones above. */
4333 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4334 high0, 1, low1, 0));
4335 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4336 high1, 1, high0, 1));
4338 /* We now have four cases, depending on whether we are including or
4339 excluding the two ranges. */
4342 /* If they don't overlap, the result is false. If the second range
4343 is a subset it is the result. Otherwise, the range is from the start
4344 of the second to the end of the first. */
4346 in_p = 0, low = high = 0;
4348 in_p = 1, low = low1, high = high1;
4350 in_p = 1, low = low1, high = high0;
4353 else if (in0_p && ! in1_p)
4355 /* If they don't overlap, the result is the first range. If they are
4356 equal, the result is false. If the second range is a subset of the
4357 first, and the ranges begin at the same place, we go from just after
4358 the end of the second range to the end of the first. If the second
4359 range is not a subset of the first, or if it is a subset and both
4360 ranges end at the same place, the range starts at the start of the
4361 first range and ends just before the second range.
4362 Otherwise, we can't describe this as a single range. */
4364 in_p = 1, low = low0, high = high0;
4365 else if (lowequal && highequal)
4366 in_p = 0, low = high = 0;
4367 else if (subset && lowequal)
4369 low = range_successor (high1);
4374 /* We are in the weird situation where high0 > high1 but
4375 high1 has no successor. Punt. */
4379 else if (! subset || highequal)
4382 high = range_predecessor (low1);
4386 /* low0 < low1 but low1 has no predecessor. Punt. */
4394 else if (! in0_p && in1_p)
4396 /* If they don't overlap, the result is the second range. If the second
4397 is a subset of the first, the result is false. Otherwise,
4398 the range starts just after the first range and ends at the
4399 end of the second. */
4401 in_p = 1, low = low1, high = high1;
4402 else if (subset || highequal)
4403 in_p = 0, low = high = 0;
4406 low = range_successor (high0);
4411 /* high1 > high0 but high0 has no successor. Punt. */
4419 /* The case where we are excluding both ranges. Here the complex case
4420 is if they don't overlap. In that case, the only time we have a
4421 range is if they are adjacent. If the second is a subset of the
4422 first, the result is the first. Otherwise, the range to exclude
4423 starts at the beginning of the first range and ends at the end of the
4427 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4428 range_successor (high0),
4430 in_p = 0, low = low0, high = high1;
4433 /* Canonicalize - [min, x] into - [-, x]. */
4434 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4435 switch (TREE_CODE (TREE_TYPE (low0)))
4438 if (TYPE_PRECISION (TREE_TYPE (low0))
4439 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4443 if (tree_int_cst_equal (low0,
4444 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4448 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4449 && integer_zerop (low0))
4456 /* Canonicalize - [x, max] into - [x, -]. */
4457 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4458 switch (TREE_CODE (TREE_TYPE (high1)))
4461 if (TYPE_PRECISION (TREE_TYPE (high1))
4462 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4466 if (tree_int_cst_equal (high1,
4467 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4471 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4472 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4474 integer_one_node, 1)))
4481 /* The ranges might be also adjacent between the maximum and
4482 minimum values of the given type. For
4483 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4484 return + [x + 1, y - 1]. */
4485 if (low0 == 0 && high1 == 0)
4487 low = range_successor (high0);
4488 high = range_predecessor (low1);
4489 if (low == 0 || high == 0)
4499 in_p = 0, low = low0, high = high0;
4501 in_p = 0, low = low0, high = high1;
4504 *pin_p = in_p, *plow = low, *phigh = high;
4509 /* Subroutine of fold, looking inside expressions of the form
4510 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4511 of the COND_EXPR. This function is being used also to optimize
4512 A op B ? C : A, by reversing the comparison first.
4514 Return a folded expression whose code is not a COND_EXPR
4515 anymore, or NULL_TREE if no folding opportunity is found. */
4518 fold_cond_expr_with_comparison (location_t loc, tree type,
4519 tree arg0, tree arg1, tree arg2)
4521 enum tree_code comp_code = TREE_CODE (arg0);
4522 tree arg00 = TREE_OPERAND (arg0, 0);
4523 tree arg01 = TREE_OPERAND (arg0, 1);
4524 tree arg1_type = TREE_TYPE (arg1);
4530 /* If we have A op 0 ? A : -A, consider applying the following
4533 A == 0? A : -A same as -A
4534 A != 0? A : -A same as A
4535 A >= 0? A : -A same as abs (A)
4536 A > 0? A : -A same as abs (A)
4537 A <= 0? A : -A same as -abs (A)
4538 A < 0? A : -A same as -abs (A)
4540 None of these transformations work for modes with signed
4541 zeros. If A is +/-0, the first two transformations will
4542 change the sign of the result (from +0 to -0, or vice
4543 versa). The last four will fix the sign of the result,
4544 even though the original expressions could be positive or
4545 negative, depending on the sign of A.
4547 Note that all these transformations are correct if A is
4548 NaN, since the two alternatives (A and -A) are also NaNs. */
4549 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4550 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4551 ? real_zerop (arg01)
4552 : integer_zerop (arg01))
4553 && ((TREE_CODE (arg2) == NEGATE_EXPR
4554 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4555 /* In the case that A is of the form X-Y, '-A' (arg2) may
4556 have already been folded to Y-X, check for that. */
4557 || (TREE_CODE (arg1) == MINUS_EXPR
4558 && TREE_CODE (arg2) == MINUS_EXPR
4559 && operand_equal_p (TREE_OPERAND (arg1, 0),
4560 TREE_OPERAND (arg2, 1), 0)
4561 && operand_equal_p (TREE_OPERAND (arg1, 1),
4562 TREE_OPERAND (arg2, 0), 0))))
4567 tem = fold_convert_loc (loc, arg1_type, arg1);
4568 return pedantic_non_lvalue_loc (loc,
4569 fold_convert_loc (loc, type,
4570 negate_expr (tem)));
4573 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4576 if (flag_trapping_math)
4581 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4582 arg1 = fold_convert_loc (loc, signed_type_for
4583 (TREE_TYPE (arg1)), arg1);
4584 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4585 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4588 if (flag_trapping_math)
4592 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4593 arg1 = fold_convert_loc (loc, signed_type_for
4594 (TREE_TYPE (arg1)), arg1);
4595 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4596 return negate_expr (fold_convert_loc (loc, type, tem));
4598 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4602 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4603 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4604 both transformations are correct when A is NaN: A != 0
4605 is then true, and A == 0 is false. */
4607 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4608 && integer_zerop (arg01) && integer_zerop (arg2))
4610 if (comp_code == NE_EXPR)
4611 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4612 else if (comp_code == EQ_EXPR)
4613 return build_int_cst (type, 0);
4616 /* Try some transformations of A op B ? A : B.
4618 A == B? A : B same as B
4619 A != B? A : B same as A
4620 A >= B? A : B same as max (A, B)
4621 A > B? A : B same as max (B, A)
4622 A <= B? A : B same as min (A, B)
4623 A < B? A : B same as min (B, A)
4625 As above, these transformations don't work in the presence
4626 of signed zeros. For example, if A and B are zeros of
4627 opposite sign, the first two transformations will change
4628 the sign of the result. In the last four, the original
4629 expressions give different results for (A=+0, B=-0) and
4630 (A=-0, B=+0), but the transformed expressions do not.
4632 The first two transformations are correct if either A or B
4633 is a NaN. In the first transformation, the condition will
4634 be false, and B will indeed be chosen. In the case of the
4635 second transformation, the condition A != B will be true,
4636 and A will be chosen.
4638 The conversions to max() and min() are not correct if B is
4639 a number and A is not. The conditions in the original
4640 expressions will be false, so all four give B. The min()
4641 and max() versions would give a NaN instead. */
4642 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4643 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4644 /* Avoid these transformations if the COND_EXPR may be used
4645 as an lvalue in the C++ front-end. PR c++/19199. */
4647 || (strcmp (lang_hooks.name, "GNU C++") != 0
4648 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4649 || ! maybe_lvalue_p (arg1)
4650 || ! maybe_lvalue_p (arg2)))
4652 tree comp_op0 = arg00;
4653 tree comp_op1 = arg01;
4654 tree comp_type = TREE_TYPE (comp_op0);
4656 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4657 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4667 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4669 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4674 /* In C++ a ?: expression can be an lvalue, so put the
4675 operand which will be used if they are equal first
4676 so that we can convert this back to the
4677 corresponding COND_EXPR. */
4678 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4680 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4681 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4682 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4683 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4684 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4685 comp_op1, comp_op0);
4686 return pedantic_non_lvalue_loc (loc,
4687 fold_convert_loc (loc, type, tem));
4694 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4696 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4697 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4698 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4699 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4700 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4701 comp_op1, comp_op0);
4702 return pedantic_non_lvalue_loc (loc,
4703 fold_convert_loc (loc, type, tem));
4707 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4708 return pedantic_non_lvalue_loc (loc,
4709 fold_convert_loc (loc, type, arg2));
4712 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4713 return pedantic_non_lvalue_loc (loc,
4714 fold_convert_loc (loc, type, arg1));
4717 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4722 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4723 we might still be able to simplify this. For example,
4724 if C1 is one less or one more than C2, this might have started
4725 out as a MIN or MAX and been transformed by this function.
4726 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4728 if (INTEGRAL_TYPE_P (type)
4729 && TREE_CODE (arg01) == INTEGER_CST
4730 && TREE_CODE (arg2) == INTEGER_CST)
4734 if (TREE_CODE (arg1) == INTEGER_CST)
4736 /* We can replace A with C1 in this case. */
4737 arg1 = fold_convert_loc (loc, type, arg01);
4738 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4741 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4742 MIN_EXPR, to preserve the signedness of the comparison. */
4743 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4745 && operand_equal_p (arg01,
4746 const_binop (PLUS_EXPR, arg2,
4747 build_int_cst (type, 1)),
4750 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4751 fold_convert_loc (loc, TREE_TYPE (arg00),
4753 return pedantic_non_lvalue_loc (loc,
4754 fold_convert_loc (loc, type, tem));
4759 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4761 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4763 && operand_equal_p (arg01,
4764 const_binop (MINUS_EXPR, arg2,
4765 build_int_cst (type, 1)),
4768 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4769 fold_convert_loc (loc, TREE_TYPE (arg00),
4771 return pedantic_non_lvalue_loc (loc,
4772 fold_convert_loc (loc, type, tem));
4777 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4778 MAX_EXPR, to preserve the signedness of the comparison. */
4779 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4781 && operand_equal_p (arg01,
4782 const_binop (MINUS_EXPR, arg2,
4783 build_int_cst (type, 1)),
4786 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4787 fold_convert_loc (loc, TREE_TYPE (arg00),
4789 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4794 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4795 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4797 && operand_equal_p (arg01,
4798 const_binop (PLUS_EXPR, arg2,
4799 build_int_cst (type, 1)),
4802 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4803 fold_convert_loc (loc, TREE_TYPE (arg00),
4805 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4819 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4820 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4821 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4825 /* EXP is some logical combination of boolean tests. See if we can
4826 merge it into some range test. Return the new tree if so. */
4829 fold_range_test (location_t loc, enum tree_code code, tree type,
4832 int or_op = (code == TRUTH_ORIF_EXPR
4833 || code == TRUTH_OR_EXPR);
4834 int in0_p, in1_p, in_p;
4835 tree low0, low1, low, high0, high1, high;
4836 bool strict_overflow_p = false;
4837 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4838 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4840 const char * const warnmsg = G_("assuming signed overflow does not occur "
4841 "when simplifying range test");
4843 /* If this is an OR operation, invert both sides; we will invert
4844 again at the end. */
4846 in0_p = ! in0_p, in1_p = ! in1_p;
4848 /* If both expressions are the same, if we can merge the ranges, and we
4849 can build the range test, return it or it inverted. If one of the
4850 ranges is always true or always false, consider it to be the same
4851 expression as the other. */
4852 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4853 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4855 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
4857 : rhs != 0 ? rhs : integer_zero_node,
4860 if (strict_overflow_p)
4861 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4862 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4865 /* On machines where the branch cost is expensive, if this is a
4866 short-circuited branch and the underlying object on both sides
4867 is the same, make a non-short-circuit operation. */
4868 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4869 && lhs != 0 && rhs != 0
4870 && (code == TRUTH_ANDIF_EXPR
4871 || code == TRUTH_ORIF_EXPR)
4872 && operand_equal_p (lhs, rhs, 0))
4874 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4875 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4876 which cases we can't do this. */
4877 if (simple_operand_p (lhs))
4878 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4879 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4882 else if (lang_hooks.decls.global_bindings_p () == 0
4883 && ! CONTAINS_PLACEHOLDER_P (lhs))
4885 tree common = save_expr (lhs);
4887 if (0 != (lhs = build_range_check (loc, type, common,
4888 or_op ? ! in0_p : in0_p,
4890 && (0 != (rhs = build_range_check (loc, type, common,
4891 or_op ? ! in1_p : in1_p,
4894 if (strict_overflow_p)
4895 fold_overflow_warning (warnmsg,
4896 WARN_STRICT_OVERFLOW_COMPARISON);
4897 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4898 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4907 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4908 bit value. Arrange things so the extra bits will be set to zero if and
4909 only if C is signed-extended to its full width. If MASK is nonzero,
4910 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4913 unextend (tree c, int p, int unsignedp, tree mask)
4915 tree type = TREE_TYPE (c);
4916 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4919 if (p == modesize || unsignedp)
4922 /* We work by getting just the sign bit into the low-order bit, then
4923 into the high-order bit, then sign-extend. We then XOR that value
4925 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4926 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4928 /* We must use a signed type in order to get an arithmetic right shift.
4929 However, we must also avoid introducing accidental overflows, so that
4930 a subsequent call to integer_zerop will work. Hence we must
4931 do the type conversion here. At this point, the constant is either
4932 zero or one, and the conversion to a signed type can never overflow.
4933 We could get an overflow if this conversion is done anywhere else. */
4934 if (TYPE_UNSIGNED (type))
4935 temp = fold_convert (signed_type_for (type), temp);
4937 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4938 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4940 temp = const_binop (BIT_AND_EXPR, temp,
4941 fold_convert (TREE_TYPE (c), mask));
4942 /* If necessary, convert the type back to match the type of C. */
4943 if (TYPE_UNSIGNED (type))
4944 temp = fold_convert (type, temp);
4946 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4949 /* For an expression that has the form
4953 we can drop one of the inner expressions and simplify to
4957 LOC is the location of the resulting expression. OP is the inner
4958 logical operation; the left-hand side in the examples above, while CMPOP
4959 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4960 removing a condition that guards another, as in
4961 (A != NULL && A->...) || A == NULL
4962 which we must not transform. If RHS_ONLY is true, only eliminate the
4963 right-most operand of the inner logical operation. */
4966 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4969 tree type = TREE_TYPE (cmpop);
4970 enum tree_code code = TREE_CODE (cmpop);
4971 enum tree_code truthop_code = TREE_CODE (op);
4972 tree lhs = TREE_OPERAND (op, 0);
4973 tree rhs = TREE_OPERAND (op, 1);
4974 tree orig_lhs = lhs, orig_rhs = rhs;
4975 enum tree_code rhs_code = TREE_CODE (rhs);
4976 enum tree_code lhs_code = TREE_CODE (lhs);
4977 enum tree_code inv_code;
4979 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4982 if (TREE_CODE_CLASS (code) != tcc_comparison)
4985 if (rhs_code == truthop_code)
4987 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4988 if (newrhs != NULL_TREE)
4991 rhs_code = TREE_CODE (rhs);
4994 if (lhs_code == truthop_code && !rhs_only)
4996 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4997 if (newlhs != NULL_TREE)
5000 lhs_code = TREE_CODE (lhs);
5004 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5005 if (inv_code == rhs_code
5006 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5007 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5009 if (!rhs_only && inv_code == lhs_code
5010 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5011 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5013 if (rhs != orig_rhs || lhs != orig_lhs)
5014 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5019 /* Find ways of folding logical expressions of LHS and RHS:
5020 Try to merge two comparisons to the same innermost item.
5021 Look for range tests like "ch >= '0' && ch <= '9'".
5022 Look for combinations of simple terms on machines with expensive branches
5023 and evaluate the RHS unconditionally.
5025 For example, if we have p->a == 2 && p->b == 4 and we can make an
5026 object large enough to span both A and B, we can do this with a comparison
5027 against the object ANDed with the a mask.
5029 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5030 operations to do this with one comparison.
5032 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5033 function and the one above.
5035 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5036 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5038 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5041 We return the simplified tree or 0 if no optimization is possible. */
5044 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5047 /* If this is the "or" of two comparisons, we can do something if
5048 the comparisons are NE_EXPR. If this is the "and", we can do something
5049 if the comparisons are EQ_EXPR. I.e.,
5050 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5052 WANTED_CODE is this operation code. For single bit fields, we can
5053 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5054 comparison for one-bit fields. */
5056 enum tree_code wanted_code;
5057 enum tree_code lcode, rcode;
5058 tree ll_arg, lr_arg, rl_arg, rr_arg;
5059 tree ll_inner, lr_inner, rl_inner, rr_inner;
5060 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5061 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5062 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5063 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5064 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5065 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5066 enum machine_mode lnmode, rnmode;
5067 tree ll_mask, lr_mask, rl_mask, rr_mask;
5068 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5069 tree l_const, r_const;
5070 tree lntype, rntype, result;
5071 HOST_WIDE_INT first_bit, end_bit;
5073 tree orig_lhs = lhs, orig_rhs = rhs;
5074 enum tree_code orig_code = code;
5076 /* Start by getting the comparison codes. Fail if anything is volatile.
5077 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5078 it were surrounded with a NE_EXPR. */
5080 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5083 lcode = TREE_CODE (lhs);
5084 rcode = TREE_CODE (rhs);
5086 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5088 lhs = build2 (NE_EXPR, truth_type, lhs,
5089 build_int_cst (TREE_TYPE (lhs), 0));
5093 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5095 rhs = build2 (NE_EXPR, truth_type, rhs,
5096 build_int_cst (TREE_TYPE (rhs), 0));
5100 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5101 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5104 ll_arg = TREE_OPERAND (lhs, 0);
5105 lr_arg = TREE_OPERAND (lhs, 1);
5106 rl_arg = TREE_OPERAND (rhs, 0);
5107 rr_arg = TREE_OPERAND (rhs, 1);
5109 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5110 if (simple_operand_p (ll_arg)
5111 && simple_operand_p (lr_arg))
5113 if (operand_equal_p (ll_arg, rl_arg, 0)
5114 && operand_equal_p (lr_arg, rr_arg, 0))
5116 result = combine_comparisons (loc, code, lcode, rcode,
5117 truth_type, ll_arg, lr_arg);
5121 else if (operand_equal_p (ll_arg, rr_arg, 0)
5122 && operand_equal_p (lr_arg, rl_arg, 0))
5124 result = combine_comparisons (loc, code, lcode,
5125 swap_tree_comparison (rcode),
5126 truth_type, ll_arg, lr_arg);
5132 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5133 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5135 /* If the RHS can be evaluated unconditionally and its operands are
5136 simple, it wins to evaluate the RHS unconditionally on machines
5137 with expensive branches. In this case, this isn't a comparison
5138 that can be merged. Avoid doing this if the RHS is a floating-point
5139 comparison since those can trap. */
5141 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5143 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5144 && simple_operand_p (rl_arg)
5145 && simple_operand_p (rr_arg))
5147 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5148 if (code == TRUTH_OR_EXPR
5149 && lcode == NE_EXPR && integer_zerop (lr_arg)
5150 && rcode == NE_EXPR && integer_zerop (rr_arg)
5151 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5152 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5153 return build2_loc (loc, NE_EXPR, truth_type,
5154 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5156 build_int_cst (TREE_TYPE (ll_arg), 0));
5158 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5159 if (code == TRUTH_AND_EXPR
5160 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5161 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5162 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5163 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5164 return build2_loc (loc, EQ_EXPR, truth_type,
5165 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5167 build_int_cst (TREE_TYPE (ll_arg), 0));
5169 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5171 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5172 return build2_loc (loc, code, truth_type, lhs, rhs);
5177 /* See if the comparisons can be merged. Then get all the parameters for
5180 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5181 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5185 ll_inner = decode_field_reference (loc, ll_arg,
5186 &ll_bitsize, &ll_bitpos, &ll_mode,
5187 &ll_unsignedp, &volatilep, &ll_mask,
5189 lr_inner = decode_field_reference (loc, lr_arg,
5190 &lr_bitsize, &lr_bitpos, &lr_mode,
5191 &lr_unsignedp, &volatilep, &lr_mask,
5193 rl_inner = decode_field_reference (loc, rl_arg,
5194 &rl_bitsize, &rl_bitpos, &rl_mode,
5195 &rl_unsignedp, &volatilep, &rl_mask,
5197 rr_inner = decode_field_reference (loc, rr_arg,
5198 &rr_bitsize, &rr_bitpos, &rr_mode,
5199 &rr_unsignedp, &volatilep, &rr_mask,
5202 /* It must be true that the inner operation on the lhs of each
5203 comparison must be the same if we are to be able to do anything.
5204 Then see if we have constants. If not, the same must be true for
5206 if (volatilep || ll_inner == 0 || rl_inner == 0
5207 || ! operand_equal_p (ll_inner, rl_inner, 0))
5210 if (TREE_CODE (lr_arg) == INTEGER_CST
5211 && TREE_CODE (rr_arg) == INTEGER_CST)
5212 l_const = lr_arg, r_const = rr_arg;
5213 else if (lr_inner == 0 || rr_inner == 0
5214 || ! operand_equal_p (lr_inner, rr_inner, 0))
5217 l_const = r_const = 0;
5219 /* If either comparison code is not correct for our logical operation,
5220 fail. However, we can convert a one-bit comparison against zero into
5221 the opposite comparison against that bit being set in the field. */
5223 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5224 if (lcode != wanted_code)
5226 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5228 /* Make the left operand unsigned, since we are only interested
5229 in the value of one bit. Otherwise we are doing the wrong
5238 /* This is analogous to the code for l_const above. */
5239 if (rcode != wanted_code)
5241 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5250 /* See if we can find a mode that contains both fields being compared on
5251 the left. If we can't, fail. Otherwise, update all constants and masks
5252 to be relative to a field of that size. */
5253 first_bit = MIN (ll_bitpos, rl_bitpos);
5254 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5255 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5256 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5258 if (lnmode == VOIDmode)
5261 lnbitsize = GET_MODE_BITSIZE (lnmode);
5262 lnbitpos = first_bit & ~ (lnbitsize - 1);
5263 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5264 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5266 if (BYTES_BIG_ENDIAN)
5268 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5269 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5272 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5273 size_int (xll_bitpos));
5274 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5275 size_int (xrl_bitpos));
5279 l_const = fold_convert_loc (loc, lntype, l_const);
5280 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5281 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5282 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5283 fold_build1_loc (loc, BIT_NOT_EXPR,
5286 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5288 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5293 r_const = fold_convert_loc (loc, lntype, r_const);
5294 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5295 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5296 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5297 fold_build1_loc (loc, BIT_NOT_EXPR,
5300 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5302 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5306 /* If the right sides are not constant, do the same for it. Also,
5307 disallow this optimization if a size or signedness mismatch occurs
5308 between the left and right sides. */
5311 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5312 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5313 /* Make sure the two fields on the right
5314 correspond to the left without being swapped. */
5315 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5318 first_bit = MIN (lr_bitpos, rr_bitpos);
5319 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5320 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5321 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5323 if (rnmode == VOIDmode)
5326 rnbitsize = GET_MODE_BITSIZE (rnmode);
5327 rnbitpos = first_bit & ~ (rnbitsize - 1);
5328 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5329 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5331 if (BYTES_BIG_ENDIAN)
5333 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5334 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5337 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5339 size_int (xlr_bitpos));
5340 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5342 size_int (xrr_bitpos));
5344 /* Make a mask that corresponds to both fields being compared.
5345 Do this for both items being compared. If the operands are the
5346 same size and the bits being compared are in the same position
5347 then we can do this by masking both and comparing the masked
5349 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5350 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5351 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5353 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5354 ll_unsignedp || rl_unsignedp);
5355 if (! all_ones_mask_p (ll_mask, lnbitsize))
5356 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5358 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5359 lr_unsignedp || rr_unsignedp);
5360 if (! all_ones_mask_p (lr_mask, rnbitsize))
5361 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5363 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5366 /* There is still another way we can do something: If both pairs of
5367 fields being compared are adjacent, we may be able to make a wider
5368 field containing them both.
5370 Note that we still must mask the lhs/rhs expressions. Furthermore,
5371 the mask must be shifted to account for the shift done by
5372 make_bit_field_ref. */
5373 if ((ll_bitsize + ll_bitpos == rl_bitpos
5374 && lr_bitsize + lr_bitpos == rr_bitpos)
5375 || (ll_bitpos == rl_bitpos + rl_bitsize
5376 && lr_bitpos == rr_bitpos + rr_bitsize))
5380 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5381 ll_bitsize + rl_bitsize,
5382 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5383 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5384 lr_bitsize + rr_bitsize,
5385 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5387 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5388 size_int (MIN (xll_bitpos, xrl_bitpos)));
5389 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5390 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5392 /* Convert to the smaller type before masking out unwanted bits. */
5394 if (lntype != rntype)
5396 if (lnbitsize > rnbitsize)
5398 lhs = fold_convert_loc (loc, rntype, lhs);
5399 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5402 else if (lnbitsize < rnbitsize)
5404 rhs = fold_convert_loc (loc, lntype, rhs);
5405 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5410 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5411 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5413 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5414 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5416 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5422 /* Handle the case of comparisons with constants. If there is something in
5423 common between the masks, those bits of the constants must be the same.
5424 If not, the condition is always false. Test for this to avoid generating
5425 incorrect code below. */
5426 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5427 if (! integer_zerop (result)
5428 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5429 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5431 if (wanted_code == NE_EXPR)
5433 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5434 return constant_boolean_node (true, truth_type);
5438 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5439 return constant_boolean_node (false, truth_type);
5443 /* Construct the expression we will return. First get the component
5444 reference we will make. Unless the mask is all ones the width of
5445 that field, perform the mask operation. Then compare with the
5447 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5448 ll_unsignedp || rl_unsignedp);
5450 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5451 if (! all_ones_mask_p (ll_mask, lnbitsize))
5452 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5454 return build2_loc (loc, wanted_code, truth_type, result,
5455 const_binop (BIT_IOR_EXPR, l_const, r_const));
5458 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5462 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5466 enum tree_code op_code;
5469 int consts_equal, consts_lt;
5472 STRIP_SIGN_NOPS (arg0);
5474 op_code = TREE_CODE (arg0);
5475 minmax_const = TREE_OPERAND (arg0, 1);
5476 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5477 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5478 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5479 inner = TREE_OPERAND (arg0, 0);
5481 /* If something does not permit us to optimize, return the original tree. */
5482 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5483 || TREE_CODE (comp_const) != INTEGER_CST
5484 || TREE_OVERFLOW (comp_const)
5485 || TREE_CODE (minmax_const) != INTEGER_CST
5486 || TREE_OVERFLOW (minmax_const))
5489 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5490 and GT_EXPR, doing the rest with recursive calls using logical
5494 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5497 = optimize_minmax_comparison (loc,
5498 invert_tree_comparison (code, false),
5501 return invert_truthvalue_loc (loc, tem);
5507 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5508 optimize_minmax_comparison
5509 (loc, EQ_EXPR, type, arg0, comp_const),
5510 optimize_minmax_comparison
5511 (loc, GT_EXPR, type, arg0, comp_const));
5514 if (op_code == MAX_EXPR && consts_equal)
5515 /* MAX (X, 0) == 0 -> X <= 0 */
5516 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5518 else if (op_code == MAX_EXPR && consts_lt)
5519 /* MAX (X, 0) == 5 -> X == 5 */
5520 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5522 else if (op_code == MAX_EXPR)
5523 /* MAX (X, 0) == -1 -> false */
5524 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5526 else if (consts_equal)
5527 /* MIN (X, 0) == 0 -> X >= 0 */
5528 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5531 /* MIN (X, 0) == 5 -> false */
5532 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5535 /* MIN (X, 0) == -1 -> X == -1 */
5536 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5539 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5540 /* MAX (X, 0) > 0 -> X > 0
5541 MAX (X, 0) > 5 -> X > 5 */
5542 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5544 else if (op_code == MAX_EXPR)
5545 /* MAX (X, 0) > -1 -> true */
5546 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5548 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5549 /* MIN (X, 0) > 0 -> false
5550 MIN (X, 0) > 5 -> false */
5551 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5554 /* MIN (X, 0) > -1 -> X > -1 */
5555 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5562 /* T is an integer expression that is being multiplied, divided, or taken a
5563 modulus (CODE says which and what kind of divide or modulus) by a
5564 constant C. See if we can eliminate that operation by folding it with
5565 other operations already in T. WIDE_TYPE, if non-null, is a type that
5566 should be used for the computation if wider than our type.
5568 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5569 (X * 2) + (Y * 4). We must, however, be assured that either the original
5570 expression would not overflow or that overflow is undefined for the type
5571 in the language in question.
5573 If we return a non-null expression, it is an equivalent form of the
5574 original computation, but need not be in the original type.
5576 We set *STRICT_OVERFLOW_P to true if the return values depends on
5577 signed overflow being undefined. Otherwise we do not change
5578 *STRICT_OVERFLOW_P. */
5581 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5582 bool *strict_overflow_p)
5584 /* To avoid exponential search depth, refuse to allow recursion past
5585 three levels. Beyond that (1) it's highly unlikely that we'll find
5586 something interesting and (2) we've probably processed it before
5587 when we built the inner expression. */
5596 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5603 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5604 bool *strict_overflow_p)
5606 tree type = TREE_TYPE (t);
5607 enum tree_code tcode = TREE_CODE (t);
5608 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5609 > GET_MODE_SIZE (TYPE_MODE (type)))
5610 ? wide_type : type);
5612 int same_p = tcode == code;
5613 tree op0 = NULL_TREE, op1 = NULL_TREE;
5614 bool sub_strict_overflow_p;
5616 /* Don't deal with constants of zero here; they confuse the code below. */
5617 if (integer_zerop (c))
5620 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5621 op0 = TREE_OPERAND (t, 0);
5623 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5624 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5626 /* Note that we need not handle conditional operations here since fold
5627 already handles those cases. So just do arithmetic here. */
5631 /* For a constant, we can always simplify if we are a multiply
5632 or (for divide and modulus) if it is a multiple of our constant. */
5633 if (code == MULT_EXPR
5634 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5635 return const_binop (code, fold_convert (ctype, t),
5636 fold_convert (ctype, c));
5639 CASE_CONVERT: case NON_LVALUE_EXPR:
5640 /* If op0 is an expression ... */
5641 if ((COMPARISON_CLASS_P (op0)
5642 || UNARY_CLASS_P (op0)
5643 || BINARY_CLASS_P (op0)
5644 || VL_EXP_CLASS_P (op0)
5645 || EXPRESSION_CLASS_P (op0))
5646 /* ... and has wrapping overflow, and its type is smaller
5647 than ctype, then we cannot pass through as widening. */
5648 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5649 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5650 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5651 && (TYPE_PRECISION (ctype)
5652 > TYPE_PRECISION (TREE_TYPE (op0))))
5653 /* ... or this is a truncation (t is narrower than op0),
5654 then we cannot pass through this narrowing. */
5655 || (TYPE_PRECISION (type)
5656 < TYPE_PRECISION (TREE_TYPE (op0)))
5657 /* ... or signedness changes for division or modulus,
5658 then we cannot pass through this conversion. */
5659 || (code != MULT_EXPR
5660 && (TYPE_UNSIGNED (ctype)
5661 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5662 /* ... or has undefined overflow while the converted to
5663 type has not, we cannot do the operation in the inner type
5664 as that would introduce undefined overflow. */
5665 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5666 && !TYPE_OVERFLOW_UNDEFINED (type))))
5669 /* Pass the constant down and see if we can make a simplification. If
5670 we can, replace this expression with the inner simplification for
5671 possible later conversion to our or some other type. */
5672 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5673 && TREE_CODE (t2) == INTEGER_CST
5674 && !TREE_OVERFLOW (t2)
5675 && (0 != (t1 = extract_muldiv (op0, t2, code,
5677 ? ctype : NULL_TREE,
5678 strict_overflow_p))))
5683 /* If widening the type changes it from signed to unsigned, then we
5684 must avoid building ABS_EXPR itself as unsigned. */
5685 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5687 tree cstype = (*signed_type_for) (ctype);
5688 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5691 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5692 return fold_convert (ctype, t1);
5696 /* If the constant is negative, we cannot simplify this. */
5697 if (tree_int_cst_sgn (c) == -1)
5701 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5703 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5706 case MIN_EXPR: case MAX_EXPR:
5707 /* If widening the type changes the signedness, then we can't perform
5708 this optimization as that changes the result. */
5709 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5712 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5713 sub_strict_overflow_p = false;
5714 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5715 &sub_strict_overflow_p)) != 0
5716 && (t2 = extract_muldiv (op1, c, code, wide_type,
5717 &sub_strict_overflow_p)) != 0)
5719 if (tree_int_cst_sgn (c) < 0)
5720 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5721 if (sub_strict_overflow_p)
5722 *strict_overflow_p = true;
5723 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5724 fold_convert (ctype, t2));
5728 case LSHIFT_EXPR: case RSHIFT_EXPR:
5729 /* If the second operand is constant, this is a multiplication
5730 or floor division, by a power of two, so we can treat it that
5731 way unless the multiplier or divisor overflows. Signed
5732 left-shift overflow is implementation-defined rather than
5733 undefined in C90, so do not convert signed left shift into
5735 if (TREE_CODE (op1) == INTEGER_CST
5736 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5737 /* const_binop may not detect overflow correctly,
5738 so check for it explicitly here. */
5739 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5740 && TREE_INT_CST_HIGH (op1) == 0
5741 && 0 != (t1 = fold_convert (ctype,
5742 const_binop (LSHIFT_EXPR,
5745 && !TREE_OVERFLOW (t1))
5746 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5747 ? MULT_EXPR : FLOOR_DIV_EXPR,
5749 fold_convert (ctype, op0),
5751 c, code, wide_type, strict_overflow_p);
5754 case PLUS_EXPR: case MINUS_EXPR:
5755 /* See if we can eliminate the operation on both sides. If we can, we
5756 can return a new PLUS or MINUS. If we can't, the only remaining
5757 cases where we can do anything are if the second operand is a
5759 sub_strict_overflow_p = false;
5760 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5761 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5762 if (t1 != 0 && t2 != 0
5763 && (code == MULT_EXPR
5764 /* If not multiplication, we can only do this if both operands
5765 are divisible by c. */
5766 || (multiple_of_p (ctype, op0, c)
5767 && multiple_of_p (ctype, op1, c))))
5769 if (sub_strict_overflow_p)
5770 *strict_overflow_p = true;
5771 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5772 fold_convert (ctype, t2));
5775 /* If this was a subtraction, negate OP1 and set it to be an addition.
5776 This simplifies the logic below. */
5777 if (tcode == MINUS_EXPR)
5779 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5780 /* If OP1 was not easily negatable, the constant may be OP0. */
5781 if (TREE_CODE (op0) == INTEGER_CST)
5792 if (TREE_CODE (op1) != INTEGER_CST)
5795 /* If either OP1 or C are negative, this optimization is not safe for
5796 some of the division and remainder types while for others we need
5797 to change the code. */
5798 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5800 if (code == CEIL_DIV_EXPR)
5801 code = FLOOR_DIV_EXPR;
5802 else if (code == FLOOR_DIV_EXPR)
5803 code = CEIL_DIV_EXPR;
5804 else if (code != MULT_EXPR
5805 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5809 /* If it's a multiply or a division/modulus operation of a multiple
5810 of our constant, do the operation and verify it doesn't overflow. */
5811 if (code == MULT_EXPR
5812 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5814 op1 = const_binop (code, fold_convert (ctype, op1),
5815 fold_convert (ctype, c));
5816 /* We allow the constant to overflow with wrapping semantics. */
5818 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5824 /* If we have an unsigned type is not a sizetype, we cannot widen
5825 the operation since it will change the result if the original
5826 computation overflowed. */
5827 if (TYPE_UNSIGNED (ctype)
5828 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5832 /* If we were able to eliminate our operation from the first side,
5833 apply our operation to the second side and reform the PLUS. */
5834 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5835 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5837 /* The last case is if we are a multiply. In that case, we can
5838 apply the distributive law to commute the multiply and addition
5839 if the multiplication of the constants doesn't overflow. */
5840 if (code == MULT_EXPR)
5841 return fold_build2 (tcode, ctype,
5842 fold_build2 (code, ctype,
5843 fold_convert (ctype, op0),
5844 fold_convert (ctype, c)),
5850 /* We have a special case here if we are doing something like
5851 (C * 8) % 4 since we know that's zero. */
5852 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5853 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5854 /* If the multiplication can overflow we cannot optimize this.
5855 ??? Until we can properly mark individual operations as
5856 not overflowing we need to treat sizetype special here as
5857 stor-layout relies on this opimization to make
5858 DECL_FIELD_BIT_OFFSET always a constant. */
5859 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5860 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5861 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5862 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5863 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5865 *strict_overflow_p = true;
5866 return omit_one_operand (type, integer_zero_node, op0);
5869 /* ... fall through ... */
5871 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5872 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5873 /* If we can extract our operation from the LHS, do so and return a
5874 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5875 do something only if the second operand is a constant. */
5877 && (t1 = extract_muldiv (op0, c, code, wide_type,
5878 strict_overflow_p)) != 0)
5879 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5880 fold_convert (ctype, op1));
5881 else if (tcode == MULT_EXPR && code == MULT_EXPR
5882 && (t1 = extract_muldiv (op1, c, code, wide_type,
5883 strict_overflow_p)) != 0)
5884 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5885 fold_convert (ctype, t1));
5886 else if (TREE_CODE (op1) != INTEGER_CST)
5889 /* If these are the same operation types, we can associate them
5890 assuming no overflow. */
5892 && 0 != (t1 = int_const_binop (MULT_EXPR,
5893 fold_convert (ctype, op1),
5894 fold_convert (ctype, c), 1))
5895 && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1),
5896 (TYPE_UNSIGNED (ctype)
5897 && tcode != MULT_EXPR) ? -1 : 1,
5898 TREE_OVERFLOW (t1)))
5899 && !TREE_OVERFLOW (t1))
5900 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5902 /* If these operations "cancel" each other, we have the main
5903 optimizations of this pass, which occur when either constant is a
5904 multiple of the other, in which case we replace this with either an
5905 operation or CODE or TCODE.
5907 If we have an unsigned type that is not a sizetype, we cannot do
5908 this since it will change the result if the original computation
5910 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5911 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5912 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5913 || (tcode == MULT_EXPR
5914 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5915 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5916 && code != MULT_EXPR)))
5918 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5920 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5921 *strict_overflow_p = true;
5922 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5923 fold_convert (ctype,
5924 const_binop (TRUNC_DIV_EXPR,
5927 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5929 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5930 *strict_overflow_p = true;
5931 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5932 fold_convert (ctype,
5933 const_binop (TRUNC_DIV_EXPR,
5946 /* Return a node which has the indicated constant VALUE (either 0 or
5947 1), and is of the indicated TYPE. */
5950 constant_boolean_node (int value, tree type)
5952 if (type == integer_type_node)
5953 return value ? integer_one_node : integer_zero_node;
5954 else if (type == boolean_type_node)
5955 return value ? boolean_true_node : boolean_false_node;
5957 return build_int_cst_type (type, value);
5959 return build_int_cst (NULL_TREE, value);
5963 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5964 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5965 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5966 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5967 COND is the first argument to CODE; otherwise (as in the example
5968 given here), it is the second argument. TYPE is the type of the
5969 original expression. Return NULL_TREE if no simplification is
5973 fold_binary_op_with_conditional_arg (location_t loc,
5974 enum tree_code code,
5975 tree type, tree op0, tree op1,
5976 tree cond, tree arg, int cond_first_p)
5978 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5979 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5980 tree test, true_value, false_value;
5981 tree lhs = NULL_TREE;
5982 tree rhs = NULL_TREE;
5984 if (TREE_CODE (cond) == COND_EXPR)
5986 test = TREE_OPERAND (cond, 0);
5987 true_value = TREE_OPERAND (cond, 1);
5988 false_value = TREE_OPERAND (cond, 2);
5989 /* If this operand throws an expression, then it does not make
5990 sense to try to perform a logical or arithmetic operation
5992 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5994 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5999 tree testtype = TREE_TYPE (cond);
6001 true_value = constant_boolean_node (true, testtype);
6002 false_value = constant_boolean_node (false, testtype);
6005 /* This transformation is only worthwhile if we don't have to wrap ARG
6006 in a SAVE_EXPR and the operation can be simplified without recursing
6007 on at least one of the branches once its pushed inside the COND_EXPR. */
6008 if (!TREE_CONSTANT (arg)
6009 && (TREE_SIDE_EFFECTS (arg)
6010 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6011 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6014 arg = fold_convert_loc (loc, arg_type, arg);
6017 true_value = fold_convert_loc (loc, cond_type, true_value);
6019 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6021 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6025 false_value = fold_convert_loc (loc, cond_type, false_value);
6027 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6029 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6032 /* Check that we have simplified at least one of the branches. */
6033 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6036 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6040 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6042 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6043 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6044 ADDEND is the same as X.
6046 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6047 and finite. The problematic cases are when X is zero, and its mode
6048 has signed zeros. In the case of rounding towards -infinity,
6049 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6050 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6053 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6055 if (!real_zerop (addend))
6058 /* Don't allow the fold with -fsignaling-nans. */
6059 if (HONOR_SNANS (TYPE_MODE (type)))
6062 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6063 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6066 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6067 if (TREE_CODE (addend) == REAL_CST
6068 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6071 /* The mode has signed zeros, and we have to honor their sign.
6072 In this situation, there is only one case we can return true for.
6073 X - 0 is the same as X unless rounding towards -infinity is
6075 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6078 /* Subroutine of fold() that checks comparisons of built-in math
6079 functions against real constants.
6081 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6082 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6083 is the type of the result and ARG0 and ARG1 are the operands of the
6084 comparison. ARG1 must be a TREE_REAL_CST.
6086 The function returns the constant folded tree if a simplification
6087 can be made, and NULL_TREE otherwise. */
6090 fold_mathfn_compare (location_t loc,
6091 enum built_in_function fcode, enum tree_code code,
6092 tree type, tree arg0, tree arg1)
6096 if (BUILTIN_SQRT_P (fcode))
6098 tree arg = CALL_EXPR_ARG (arg0, 0);
6099 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6101 c = TREE_REAL_CST (arg1);
6102 if (REAL_VALUE_NEGATIVE (c))
6104 /* sqrt(x) < y is always false, if y is negative. */
6105 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6106 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6108 /* sqrt(x) > y is always true, if y is negative and we
6109 don't care about NaNs, i.e. negative values of x. */
6110 if (code == NE_EXPR || !HONOR_NANS (mode))
6111 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6113 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6114 return fold_build2_loc (loc, GE_EXPR, type, arg,
6115 build_real (TREE_TYPE (arg), dconst0));
6117 else if (code == GT_EXPR || code == GE_EXPR)
6121 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6122 real_convert (&c2, mode, &c2);
6124 if (REAL_VALUE_ISINF (c2))
6126 /* sqrt(x) > y is x == +Inf, when y is very large. */
6127 if (HONOR_INFINITIES (mode))
6128 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6129 build_real (TREE_TYPE (arg), c2));
6131 /* sqrt(x) > y is always false, when y is very large
6132 and we don't care about infinities. */
6133 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6136 /* sqrt(x) > c is the same as x > c*c. */
6137 return fold_build2_loc (loc, code, type, arg,
6138 build_real (TREE_TYPE (arg), c2));
6140 else if (code == LT_EXPR || code == LE_EXPR)
6144 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6145 real_convert (&c2, mode, &c2);
6147 if (REAL_VALUE_ISINF (c2))
6149 /* sqrt(x) < y is always true, when y is a very large
6150 value and we don't care about NaNs or Infinities. */
6151 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6152 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6154 /* sqrt(x) < y is x != +Inf when y is very large and we
6155 don't care about NaNs. */
6156 if (! HONOR_NANS (mode))
6157 return fold_build2_loc (loc, NE_EXPR, type, arg,
6158 build_real (TREE_TYPE (arg), c2));
6160 /* sqrt(x) < y is x >= 0 when y is very large and we
6161 don't care about Infinities. */
6162 if (! HONOR_INFINITIES (mode))
6163 return fold_build2_loc (loc, GE_EXPR, type, arg,
6164 build_real (TREE_TYPE (arg), dconst0));
6166 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6167 if (lang_hooks.decls.global_bindings_p () != 0
6168 || CONTAINS_PLACEHOLDER_P (arg))
6171 arg = save_expr (arg);
6172 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6173 fold_build2_loc (loc, GE_EXPR, type, arg,
6174 build_real (TREE_TYPE (arg),
6176 fold_build2_loc (loc, NE_EXPR, type, arg,
6177 build_real (TREE_TYPE (arg),
6181 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6182 if (! HONOR_NANS (mode))
6183 return fold_build2_loc (loc, code, type, arg,
6184 build_real (TREE_TYPE (arg), c2));
6186 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6187 if (lang_hooks.decls.global_bindings_p () == 0
6188 && ! CONTAINS_PLACEHOLDER_P (arg))
6190 arg = save_expr (arg);
6191 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6192 fold_build2_loc (loc, GE_EXPR, type, arg,
6193 build_real (TREE_TYPE (arg),
6195 fold_build2_loc (loc, code, type, arg,
6196 build_real (TREE_TYPE (arg),
6205 /* Subroutine of fold() that optimizes comparisons against Infinities,
6206 either +Inf or -Inf.
6208 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6209 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6210 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6212 The function returns the constant folded tree if a simplification
6213 can be made, and NULL_TREE otherwise. */
6216 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6217 tree arg0, tree arg1)
6219 enum machine_mode mode;
6220 REAL_VALUE_TYPE max;
6224 mode = TYPE_MODE (TREE_TYPE (arg0));
6226 /* For negative infinity swap the sense of the comparison. */
6227 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6229 code = swap_tree_comparison (code);
6234 /* x > +Inf is always false, if with ignore sNANs. */
6235 if (HONOR_SNANS (mode))
6237 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6240 /* x <= +Inf is always true, if we don't case about NaNs. */
6241 if (! HONOR_NANS (mode))
6242 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6244 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6245 if (lang_hooks.decls.global_bindings_p () == 0
6246 && ! CONTAINS_PLACEHOLDER_P (arg0))
6248 arg0 = save_expr (arg0);
6249 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6255 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6256 real_maxval (&max, neg, mode);
6257 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6258 arg0, build_real (TREE_TYPE (arg0), max));
6261 /* x < +Inf is always equal to x <= DBL_MAX. */
6262 real_maxval (&max, neg, mode);
6263 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6264 arg0, build_real (TREE_TYPE (arg0), max));
6267 /* x != +Inf is always equal to !(x > DBL_MAX). */
6268 real_maxval (&max, neg, mode);
6269 if (! HONOR_NANS (mode))
6270 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6271 arg0, build_real (TREE_TYPE (arg0), max));
6273 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6274 arg0, build_real (TREE_TYPE (arg0), max));
6275 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6284 /* Subroutine of fold() that optimizes comparisons of a division by
6285 a nonzero integer constant against an integer constant, i.e.
6288 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6289 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6290 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6292 The function returns the constant folded tree if a simplification
6293 can be made, and NULL_TREE otherwise. */
6296 fold_div_compare (location_t loc,
6297 enum tree_code code, tree type, tree arg0, tree arg1)
6299 tree prod, tmp, hi, lo;
6300 tree arg00 = TREE_OPERAND (arg0, 0);
6301 tree arg01 = TREE_OPERAND (arg0, 1);
6303 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6307 /* We have to do this the hard way to detect unsigned overflow.
6308 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6309 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6310 TREE_INT_CST_HIGH (arg01),
6311 TREE_INT_CST_LOW (arg1),
6312 TREE_INT_CST_HIGH (arg1),
6313 &val.low, &val.high, unsigned_p);
6314 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6315 neg_overflow = false;
6319 tmp = int_const_binop (MINUS_EXPR, arg01,
6320 build_int_cst (TREE_TYPE (arg01), 1), 0);
6323 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6324 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6325 TREE_INT_CST_HIGH (prod),
6326 TREE_INT_CST_LOW (tmp),
6327 TREE_INT_CST_HIGH (tmp),
6328 &val.low, &val.high, unsigned_p);
6329 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6330 -1, overflow | TREE_OVERFLOW (prod));
6332 else if (tree_int_cst_sgn (arg01) >= 0)
6334 tmp = int_const_binop (MINUS_EXPR, arg01,
6335 build_int_cst (TREE_TYPE (arg01), 1), 0);
6336 switch (tree_int_cst_sgn (arg1))
6339 neg_overflow = true;
6340 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6345 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6350 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6360 /* A negative divisor reverses the relational operators. */
6361 code = swap_tree_comparison (code);
6363 tmp = int_const_binop (PLUS_EXPR, arg01,
6364 build_int_cst (TREE_TYPE (arg01), 1), 0);
6365 switch (tree_int_cst_sgn (arg1))
6368 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6373 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6378 neg_overflow = true;
6379 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6391 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6392 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6393 if (TREE_OVERFLOW (hi))
6394 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6395 if (TREE_OVERFLOW (lo))
6396 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6397 return build_range_check (loc, type, arg00, 1, lo, hi);
6400 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6401 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6402 if (TREE_OVERFLOW (hi))
6403 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6404 if (TREE_OVERFLOW (lo))
6405 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6406 return build_range_check (loc, type, arg00, 0, lo, hi);
6409 if (TREE_OVERFLOW (lo))
6411 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6412 return omit_one_operand_loc (loc, type, tmp, arg00);
6414 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6417 if (TREE_OVERFLOW (hi))
6419 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6420 return omit_one_operand_loc (loc, type, tmp, arg00);
6422 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6425 if (TREE_OVERFLOW (hi))
6427 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6428 return omit_one_operand_loc (loc, type, tmp, arg00);
6430 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6433 if (TREE_OVERFLOW (lo))
6435 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6436 return omit_one_operand_loc (loc, type, tmp, arg00);
6438 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6448 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6449 equality/inequality test, then return a simplified form of the test
6450 using a sign testing. Otherwise return NULL. TYPE is the desired
6454 fold_single_bit_test_into_sign_test (location_t loc,
6455 enum tree_code code, tree arg0, tree arg1,
6458 /* If this is testing a single bit, we can optimize the test. */
6459 if ((code == NE_EXPR || code == EQ_EXPR)
6460 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6461 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6463 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6464 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6465 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6467 if (arg00 != NULL_TREE
6468 /* This is only a win if casting to a signed type is cheap,
6469 i.e. when arg00's type is not a partial mode. */
6470 && TYPE_PRECISION (TREE_TYPE (arg00))
6471 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6473 tree stype = signed_type_for (TREE_TYPE (arg00));
6474 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6476 fold_convert_loc (loc, stype, arg00),
6477 build_int_cst (stype, 0));
6484 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6485 equality/inequality test, then return a simplified form of
6486 the test using shifts and logical operations. Otherwise return
6487 NULL. TYPE is the desired result type. */
6490 fold_single_bit_test (location_t loc, enum tree_code code,
6491 tree arg0, tree arg1, tree result_type)
6493 /* If this is testing a single bit, we can optimize the test. */
6494 if ((code == NE_EXPR || code == EQ_EXPR)
6495 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6496 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6498 tree inner = TREE_OPERAND (arg0, 0);
6499 tree type = TREE_TYPE (arg0);
6500 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6501 enum machine_mode operand_mode = TYPE_MODE (type);
6503 tree signed_type, unsigned_type, intermediate_type;
6506 /* First, see if we can fold the single bit test into a sign-bit
6508 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6513 /* Otherwise we have (A & C) != 0 where C is a single bit,
6514 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6515 Similarly for (A & C) == 0. */
6517 /* If INNER is a right shift of a constant and it plus BITNUM does
6518 not overflow, adjust BITNUM and INNER. */
6519 if (TREE_CODE (inner) == RSHIFT_EXPR
6520 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6521 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6522 && bitnum < TYPE_PRECISION (type)
6523 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6524 bitnum - TYPE_PRECISION (type)))
6526 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6527 inner = TREE_OPERAND (inner, 0);
6530 /* If we are going to be able to omit the AND below, we must do our
6531 operations as unsigned. If we must use the AND, we have a choice.
6532 Normally unsigned is faster, but for some machines signed is. */
6533 #ifdef LOAD_EXTEND_OP
6534 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6535 && !flag_syntax_only) ? 0 : 1;
6540 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6541 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6542 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6543 inner = fold_convert_loc (loc, intermediate_type, inner);
6546 inner = build2 (RSHIFT_EXPR, intermediate_type,
6547 inner, size_int (bitnum));
6549 one = build_int_cst (intermediate_type, 1);
6551 if (code == EQ_EXPR)
6552 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6554 /* Put the AND last so it can combine with more things. */
6555 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6557 /* Make sure to return the proper type. */
6558 inner = fold_convert_loc (loc, result_type, inner);
6565 /* Check whether we are allowed to reorder operands arg0 and arg1,
6566 such that the evaluation of arg1 occurs before arg0. */
6569 reorder_operands_p (const_tree arg0, const_tree arg1)
6571 if (! flag_evaluation_order)
6573 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6575 return ! TREE_SIDE_EFFECTS (arg0)
6576 && ! TREE_SIDE_EFFECTS (arg1);
6579 /* Test whether it is preferable two swap two operands, ARG0 and
6580 ARG1, for example because ARG0 is an integer constant and ARG1
6581 isn't. If REORDER is true, only recommend swapping if we can
6582 evaluate the operands in reverse order. */
6585 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6587 STRIP_SIGN_NOPS (arg0);
6588 STRIP_SIGN_NOPS (arg1);
6590 if (TREE_CODE (arg1) == INTEGER_CST)
6592 if (TREE_CODE (arg0) == INTEGER_CST)
6595 if (TREE_CODE (arg1) == REAL_CST)
6597 if (TREE_CODE (arg0) == REAL_CST)
6600 if (TREE_CODE (arg1) == FIXED_CST)
6602 if (TREE_CODE (arg0) == FIXED_CST)
6605 if (TREE_CODE (arg1) == COMPLEX_CST)
6607 if (TREE_CODE (arg0) == COMPLEX_CST)
6610 if (TREE_CONSTANT (arg1))
6612 if (TREE_CONSTANT (arg0))
6615 if (optimize_function_for_size_p (cfun))
6618 if (reorder && flag_evaluation_order
6619 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6622 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6623 for commutative and comparison operators. Ensuring a canonical
6624 form allows the optimizers to find additional redundancies without
6625 having to explicitly check for both orderings. */
6626 if (TREE_CODE (arg0) == SSA_NAME
6627 && TREE_CODE (arg1) == SSA_NAME
6628 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6631 /* Put SSA_NAMEs last. */
6632 if (TREE_CODE (arg1) == SSA_NAME)
6634 if (TREE_CODE (arg0) == SSA_NAME)
6637 /* Put variables last. */
6646 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6647 ARG0 is extended to a wider type. */
6650 fold_widened_comparison (location_t loc, enum tree_code code,
6651 tree type, tree arg0, tree arg1)
6653 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6655 tree shorter_type, outer_type;
6659 if (arg0_unw == arg0)
6661 shorter_type = TREE_TYPE (arg0_unw);
6663 #ifdef HAVE_canonicalize_funcptr_for_compare
6664 /* Disable this optimization if we're casting a function pointer
6665 type on targets that require function pointer canonicalization. */
6666 if (HAVE_canonicalize_funcptr_for_compare
6667 && TREE_CODE (shorter_type) == POINTER_TYPE
6668 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6672 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6675 arg1_unw = get_unwidened (arg1, NULL_TREE);
6677 /* If possible, express the comparison in the shorter mode. */
6678 if ((code == EQ_EXPR || code == NE_EXPR
6679 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6680 && (TREE_TYPE (arg1_unw) == shorter_type
6681 || ((TYPE_PRECISION (shorter_type)
6682 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6683 && (TYPE_UNSIGNED (shorter_type)
6684 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6685 || (TREE_CODE (arg1_unw) == INTEGER_CST
6686 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6687 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6688 && int_fits_type_p (arg1_unw, shorter_type))))
6689 return fold_build2_loc (loc, code, type, arg0_unw,
6690 fold_convert_loc (loc, shorter_type, arg1_unw));
6692 if (TREE_CODE (arg1_unw) != INTEGER_CST
6693 || TREE_CODE (shorter_type) != INTEGER_TYPE
6694 || !int_fits_type_p (arg1_unw, shorter_type))
6697 /* If we are comparing with the integer that does not fit into the range
6698 of the shorter type, the result is known. */
6699 outer_type = TREE_TYPE (arg1_unw);
6700 min = lower_bound_in_type (outer_type, shorter_type);
6701 max = upper_bound_in_type (outer_type, shorter_type);
6703 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6705 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6712 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6717 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6723 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6725 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6730 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6732 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6741 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6742 ARG0 just the signedness is changed. */
6745 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6746 tree arg0, tree arg1)
6749 tree inner_type, outer_type;
6751 if (!CONVERT_EXPR_P (arg0))
6754 outer_type = TREE_TYPE (arg0);
6755 arg0_inner = TREE_OPERAND (arg0, 0);
6756 inner_type = TREE_TYPE (arg0_inner);
6758 #ifdef HAVE_canonicalize_funcptr_for_compare
6759 /* Disable this optimization if we're casting a function pointer
6760 type on targets that require function pointer canonicalization. */
6761 if (HAVE_canonicalize_funcptr_for_compare
6762 && TREE_CODE (inner_type) == POINTER_TYPE
6763 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6767 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6770 if (TREE_CODE (arg1) != INTEGER_CST
6771 && !(CONVERT_EXPR_P (arg1)
6772 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6775 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6780 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6783 if (TREE_CODE (arg1) == INTEGER_CST)
6784 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6785 0, TREE_OVERFLOW (arg1));
6787 arg1 = fold_convert_loc (loc, inner_type, arg1);
6789 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6792 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6793 step of the array. Reconstructs s and delta in the case of s *
6794 delta being an integer constant (and thus already folded). ADDR is
6795 the address. MULT is the multiplicative expression. If the
6796 function succeeds, the new address expression is returned.
6797 Otherwise NULL_TREE is returned. LOC is the location of the
6798 resulting expression. */
6801 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6803 tree s, delta, step;
6804 tree ref = TREE_OPERAND (addr, 0), pref;
6809 /* Strip the nops that might be added when converting op1 to sizetype. */
6812 /* Canonicalize op1 into a possibly non-constant delta
6813 and an INTEGER_CST s. */
6814 if (TREE_CODE (op1) == MULT_EXPR)
6816 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6821 if (TREE_CODE (arg0) == INTEGER_CST)
6826 else if (TREE_CODE (arg1) == INTEGER_CST)
6834 else if (TREE_CODE (op1) == INTEGER_CST)
6841 /* Simulate we are delta * 1. */
6843 s = integer_one_node;
6846 for (;; ref = TREE_OPERAND (ref, 0))
6848 if (TREE_CODE (ref) == ARRAY_REF)
6852 /* Remember if this was a multi-dimensional array. */
6853 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6856 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6859 itype = TREE_TYPE (domain);
6861 step = array_ref_element_size (ref);
6862 if (TREE_CODE (step) != INTEGER_CST)
6867 if (! tree_int_cst_equal (step, s))
6872 /* Try if delta is a multiple of step. */
6873 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6879 /* Only fold here if we can verify we do not overflow one
6880 dimension of a multi-dimensional array. */
6885 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6886 || !TYPE_MAX_VALUE (domain)
6887 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6890 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6891 fold_convert_loc (loc, itype,
6892 TREE_OPERAND (ref, 1)),
6893 fold_convert_loc (loc, itype, delta));
6895 || TREE_CODE (tmp) != INTEGER_CST
6896 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6905 if (!handled_component_p (ref))
6909 /* We found the suitable array reference. So copy everything up to it,
6910 and replace the index. */
6912 pref = TREE_OPERAND (addr, 0);
6913 ret = copy_node (pref);
6914 SET_EXPR_LOCATION (ret, loc);
6919 pref = TREE_OPERAND (pref, 0);
6920 TREE_OPERAND (pos, 0) = copy_node (pref);
6921 pos = TREE_OPERAND (pos, 0);
6924 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6925 fold_convert_loc (loc, itype,
6926 TREE_OPERAND (pos, 1)),
6927 fold_convert_loc (loc, itype, delta));
6929 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6933 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6934 means A >= Y && A != MAX, but in this case we know that
6935 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6938 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6940 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6942 if (TREE_CODE (bound) == LT_EXPR)
6943 a = TREE_OPERAND (bound, 0);
6944 else if (TREE_CODE (bound) == GT_EXPR)
6945 a = TREE_OPERAND (bound, 1);
6949 typea = TREE_TYPE (a);
6950 if (!INTEGRAL_TYPE_P (typea)
6951 && !POINTER_TYPE_P (typea))
6954 if (TREE_CODE (ineq) == LT_EXPR)
6956 a1 = TREE_OPERAND (ineq, 1);
6957 y = TREE_OPERAND (ineq, 0);
6959 else if (TREE_CODE (ineq) == GT_EXPR)
6961 a1 = TREE_OPERAND (ineq, 0);
6962 y = TREE_OPERAND (ineq, 1);
6967 if (TREE_TYPE (a1) != typea)
6970 if (POINTER_TYPE_P (typea))
6972 /* Convert the pointer types into integer before taking the difference. */
6973 tree ta = fold_convert_loc (loc, ssizetype, a);
6974 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6975 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6978 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6980 if (!diff || !integer_onep (diff))
6983 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6986 /* Fold a sum or difference of at least one multiplication.
6987 Returns the folded tree or NULL if no simplification could be made. */
6990 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6991 tree arg0, tree arg1)
6993 tree arg00, arg01, arg10, arg11;
6994 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6996 /* (A * C) +- (B * C) -> (A+-B) * C.
6997 (A * C) +- A -> A * (C+-1).
6998 We are most concerned about the case where C is a constant,
6999 but other combinations show up during loop reduction. Since
7000 it is not difficult, try all four possibilities. */
7002 if (TREE_CODE (arg0) == MULT_EXPR)
7004 arg00 = TREE_OPERAND (arg0, 0);
7005 arg01 = TREE_OPERAND (arg0, 1);
7007 else if (TREE_CODE (arg0) == INTEGER_CST)
7009 arg00 = build_one_cst (type);
7014 /* We cannot generate constant 1 for fract. */
7015 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7018 arg01 = build_one_cst (type);
7020 if (TREE_CODE (arg1) == MULT_EXPR)
7022 arg10 = TREE_OPERAND (arg1, 0);
7023 arg11 = TREE_OPERAND (arg1, 1);
7025 else if (TREE_CODE (arg1) == INTEGER_CST)
7027 arg10 = build_one_cst (type);
7028 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7029 the purpose of this canonicalization. */
7030 if (TREE_INT_CST_HIGH (arg1) == -1
7031 && negate_expr_p (arg1)
7032 && code == PLUS_EXPR)
7034 arg11 = negate_expr (arg1);
7042 /* We cannot generate constant 1 for fract. */
7043 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7046 arg11 = build_one_cst (type);
7050 if (operand_equal_p (arg01, arg11, 0))
7051 same = arg01, alt0 = arg00, alt1 = arg10;
7052 else if (operand_equal_p (arg00, arg10, 0))
7053 same = arg00, alt0 = arg01, alt1 = arg11;
7054 else if (operand_equal_p (arg00, arg11, 0))
7055 same = arg00, alt0 = arg01, alt1 = arg10;
7056 else if (operand_equal_p (arg01, arg10, 0))
7057 same = arg01, alt0 = arg00, alt1 = arg11;
7059 /* No identical multiplicands; see if we can find a common
7060 power-of-two factor in non-power-of-two multiplies. This
7061 can help in multi-dimensional array access. */
7062 else if (host_integerp (arg01, 0)
7063 && host_integerp (arg11, 0))
7065 HOST_WIDE_INT int01, int11, tmp;
7068 int01 = TREE_INT_CST_LOW (arg01);
7069 int11 = TREE_INT_CST_LOW (arg11);
7071 /* Move min of absolute values to int11. */
7072 if ((int01 >= 0 ? int01 : -int01)
7073 < (int11 >= 0 ? int11 : -int11))
7075 tmp = int01, int01 = int11, int11 = tmp;
7076 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7083 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7084 /* The remainder should not be a constant, otherwise we
7085 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7086 increased the number of multiplications necessary. */
7087 && TREE_CODE (arg10) != INTEGER_CST)
7089 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7090 build_int_cst (TREE_TYPE (arg00),
7095 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7100 return fold_build2_loc (loc, MULT_EXPR, type,
7101 fold_build2_loc (loc, code, type,
7102 fold_convert_loc (loc, type, alt0),
7103 fold_convert_loc (loc, type, alt1)),
7104 fold_convert_loc (loc, type, same));
7109 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7110 specified by EXPR into the buffer PTR of length LEN bytes.
7111 Return the number of bytes placed in the buffer, or zero
7115 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7117 tree type = TREE_TYPE (expr);
7118 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7119 int byte, offset, word, words;
7120 unsigned char value;
7122 if (total_bytes > len)
7124 words = total_bytes / UNITS_PER_WORD;
7126 for (byte = 0; byte < total_bytes; byte++)
7128 int bitpos = byte * BITS_PER_UNIT;
7129 if (bitpos < HOST_BITS_PER_WIDE_INT)
7130 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7132 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7133 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7135 if (total_bytes > UNITS_PER_WORD)
7137 word = byte / UNITS_PER_WORD;
7138 if (WORDS_BIG_ENDIAN)
7139 word = (words - 1) - word;
7140 offset = word * UNITS_PER_WORD;
7141 if (BYTES_BIG_ENDIAN)
7142 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7144 offset += byte % UNITS_PER_WORD;
7147 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7148 ptr[offset] = value;
7154 /* Subroutine of native_encode_expr. Encode the REAL_CST
7155 specified by EXPR into the buffer PTR of length LEN bytes.
7156 Return the number of bytes placed in the buffer, or zero
7160 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7162 tree type = TREE_TYPE (expr);
7163 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7164 int byte, offset, word, words, bitpos;
7165 unsigned char value;
7167 /* There are always 32 bits in each long, no matter the size of
7168 the hosts long. We handle floating point representations with
7172 if (total_bytes > len)
7174 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7176 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7178 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7179 bitpos += BITS_PER_UNIT)
7181 byte = (bitpos / BITS_PER_UNIT) & 3;
7182 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7184 if (UNITS_PER_WORD < 4)
7186 word = byte / UNITS_PER_WORD;
7187 if (WORDS_BIG_ENDIAN)
7188 word = (words - 1) - word;
7189 offset = word * UNITS_PER_WORD;
7190 if (BYTES_BIG_ENDIAN)
7191 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7193 offset += byte % UNITS_PER_WORD;
7196 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7197 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7202 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7203 specified by EXPR into the buffer PTR of length LEN bytes.
7204 Return the number of bytes placed in the buffer, or zero
7208 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7213 part = TREE_REALPART (expr);
7214 rsize = native_encode_expr (part, ptr, len);
7217 part = TREE_IMAGPART (expr);
7218 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7221 return rsize + isize;
7225 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7226 specified by EXPR into the buffer PTR of length LEN bytes.
7227 Return the number of bytes placed in the buffer, or zero
7231 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7233 int i, size, offset, count;
7234 tree itype, elem, elements;
7237 elements = TREE_VECTOR_CST_ELTS (expr);
7238 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7239 itype = TREE_TYPE (TREE_TYPE (expr));
7240 size = GET_MODE_SIZE (TYPE_MODE (itype));
7241 for (i = 0; i < count; i++)
7245 elem = TREE_VALUE (elements);
7246 elements = TREE_CHAIN (elements);
7253 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7258 if (offset + size > len)
7260 memset (ptr+offset, 0, size);
7268 /* Subroutine of native_encode_expr. Encode the STRING_CST
7269 specified by EXPR into the buffer PTR of length LEN bytes.
7270 Return the number of bytes placed in the buffer, or zero
7274 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7276 tree type = TREE_TYPE (expr);
7277 HOST_WIDE_INT total_bytes;
7279 if (TREE_CODE (type) != ARRAY_TYPE
7280 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7281 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7282 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7284 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7285 if (total_bytes > len)
7287 if (TREE_STRING_LENGTH (expr) < total_bytes)
7289 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7290 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7291 total_bytes - TREE_STRING_LENGTH (expr));
7294 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7299 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7300 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7301 buffer PTR of length LEN bytes. Return the number of bytes
7302 placed in the buffer, or zero upon failure. */
7305 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7307 switch (TREE_CODE (expr))
7310 return native_encode_int (expr, ptr, len);
7313 return native_encode_real (expr, ptr, len);
7316 return native_encode_complex (expr, ptr, len);
7319 return native_encode_vector (expr, ptr, len);
7322 return native_encode_string (expr, ptr, len);
7330 /* Subroutine of native_interpret_expr. Interpret the contents of
7331 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7332 If the buffer cannot be interpreted, return NULL_TREE. */
7335 native_interpret_int (tree type, const unsigned char *ptr, int len)
7337 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7338 int byte, offset, word, words;
7339 unsigned char value;
7342 if (total_bytes > len)
7344 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7347 result = double_int_zero;
7348 words = total_bytes / UNITS_PER_WORD;
7350 for (byte = 0; byte < total_bytes; byte++)
7352 int bitpos = byte * BITS_PER_UNIT;
7353 if (total_bytes > UNITS_PER_WORD)
7355 word = byte / UNITS_PER_WORD;
7356 if (WORDS_BIG_ENDIAN)
7357 word = (words - 1) - word;
7358 offset = word * UNITS_PER_WORD;
7359 if (BYTES_BIG_ENDIAN)
7360 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7362 offset += byte % UNITS_PER_WORD;
7365 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7366 value = ptr[offset];
7368 if (bitpos < HOST_BITS_PER_WIDE_INT)
7369 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7371 result.high |= (unsigned HOST_WIDE_INT) value
7372 << (bitpos - HOST_BITS_PER_WIDE_INT);
7375 return double_int_to_tree (type, result);
7379 /* Subroutine of native_interpret_expr. Interpret the contents of
7380 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7381 If the buffer cannot be interpreted, return NULL_TREE. */
7384 native_interpret_real (tree type, const unsigned char *ptr, int len)
7386 enum machine_mode mode = TYPE_MODE (type);
7387 int total_bytes = GET_MODE_SIZE (mode);
7388 int byte, offset, word, words, bitpos;
7389 unsigned char value;
7390 /* There are always 32 bits in each long, no matter the size of
7391 the hosts long. We handle floating point representations with
7396 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7397 if (total_bytes > len || total_bytes > 24)
7399 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7401 memset (tmp, 0, sizeof (tmp));
7402 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7403 bitpos += BITS_PER_UNIT)
7405 byte = (bitpos / BITS_PER_UNIT) & 3;
7406 if (UNITS_PER_WORD < 4)
7408 word = byte / UNITS_PER_WORD;
7409 if (WORDS_BIG_ENDIAN)
7410 word = (words - 1) - word;
7411 offset = word * UNITS_PER_WORD;
7412 if (BYTES_BIG_ENDIAN)
7413 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7415 offset += byte % UNITS_PER_WORD;
7418 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7419 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7421 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7424 real_from_target (&r, tmp, mode);
7425 return build_real (type, r);
7429 /* Subroutine of native_interpret_expr. Interpret the contents of
7430 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7431 If the buffer cannot be interpreted, return NULL_TREE. */
7434 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7436 tree etype, rpart, ipart;
7439 etype = TREE_TYPE (type);
7440 size = GET_MODE_SIZE (TYPE_MODE (etype));
7443 rpart = native_interpret_expr (etype, ptr, size);
7446 ipart = native_interpret_expr (etype, ptr+size, size);
7449 return build_complex (type, rpart, ipart);
7453 /* Subroutine of native_interpret_expr. Interpret the contents of
7454 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7455 If the buffer cannot be interpreted, return NULL_TREE. */
7458 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7460 tree etype, elem, elements;
7463 etype = TREE_TYPE (type);
7464 size = GET_MODE_SIZE (TYPE_MODE (etype));
7465 count = TYPE_VECTOR_SUBPARTS (type);
7466 if (size * count > len)
7469 elements = NULL_TREE;
7470 for (i = count - 1; i >= 0; i--)
7472 elem = native_interpret_expr (etype, ptr+(i*size), size);
7475 elements = tree_cons (NULL_TREE, elem, elements);
7477 return build_vector (type, elements);
7481 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7482 the buffer PTR of length LEN as a constant of type TYPE. For
7483 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7484 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7485 return NULL_TREE. */
7488 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7490 switch (TREE_CODE (type))
7495 return native_interpret_int (type, ptr, len);
7498 return native_interpret_real (type, ptr, len);
7501 return native_interpret_complex (type, ptr, len);
7504 return native_interpret_vector (type, ptr, len);
7512 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7513 TYPE at compile-time. If we're unable to perform the conversion
7514 return NULL_TREE. */
7517 fold_view_convert_expr (tree type, tree expr)
7519 /* We support up to 512-bit values (for V8DFmode). */
7520 unsigned char buffer[64];
7523 /* Check that the host and target are sane. */
7524 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7527 len = native_encode_expr (expr, buffer, sizeof (buffer));
7531 return native_interpret_expr (type, buffer, len);
7534 /* Build an expression for the address of T. Folds away INDIRECT_REF
7535 to avoid confusing the gimplify process. */
7538 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7540 /* The size of the object is not relevant when talking about its address. */
7541 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7542 t = TREE_OPERAND (t, 0);
7544 if (TREE_CODE (t) == INDIRECT_REF)
7546 t = TREE_OPERAND (t, 0);
7548 if (TREE_TYPE (t) != ptrtype)
7549 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7551 else if (TREE_CODE (t) == MEM_REF
7552 && integer_zerop (TREE_OPERAND (t, 1)))
7553 return TREE_OPERAND (t, 0);
7554 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7556 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7558 if (TREE_TYPE (t) != ptrtype)
7559 t = fold_convert_loc (loc, ptrtype, t);
7562 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7567 /* Build an expression for the address of T. */
7570 build_fold_addr_expr_loc (location_t loc, tree t)
7572 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7574 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7577 /* Fold a unary expression of code CODE and type TYPE with operand
7578 OP0. Return the folded expression if folding is successful.
7579 Otherwise, return NULL_TREE. */
7582 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7586 enum tree_code_class kind = TREE_CODE_CLASS (code);
7588 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7589 && TREE_CODE_LENGTH (code) == 1);
7594 if (CONVERT_EXPR_CODE_P (code)
7595 || code == FLOAT_EXPR || code == ABS_EXPR)
7597 /* Don't use STRIP_NOPS, because signedness of argument type
7599 STRIP_SIGN_NOPS (arg0);
7603 /* Strip any conversions that don't change the mode. This
7604 is safe for every expression, except for a comparison
7605 expression because its signedness is derived from its
7608 Note that this is done as an internal manipulation within
7609 the constant folder, in order to find the simplest
7610 representation of the arguments so that their form can be
7611 studied. In any cases, the appropriate type conversions
7612 should be put back in the tree that will get out of the
7618 if (TREE_CODE_CLASS (code) == tcc_unary)
7620 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7621 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7622 fold_build1_loc (loc, code, type,
7623 fold_convert_loc (loc, TREE_TYPE (op0),
7624 TREE_OPERAND (arg0, 1))));
7625 else if (TREE_CODE (arg0) == COND_EXPR)
7627 tree arg01 = TREE_OPERAND (arg0, 1);
7628 tree arg02 = TREE_OPERAND (arg0, 2);
7629 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7630 arg01 = fold_build1_loc (loc, code, type,
7631 fold_convert_loc (loc,
7632 TREE_TYPE (op0), arg01));
7633 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7634 arg02 = fold_build1_loc (loc, code, type,
7635 fold_convert_loc (loc,
7636 TREE_TYPE (op0), arg02));
7637 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7640 /* If this was a conversion, and all we did was to move into
7641 inside the COND_EXPR, bring it back out. But leave it if
7642 it is a conversion from integer to integer and the
7643 result precision is no wider than a word since such a
7644 conversion is cheap and may be optimized away by combine,
7645 while it couldn't if it were outside the COND_EXPR. Then return
7646 so we don't get into an infinite recursion loop taking the
7647 conversion out and then back in. */
7649 if ((CONVERT_EXPR_CODE_P (code)
7650 || code == NON_LVALUE_EXPR)
7651 && TREE_CODE (tem) == COND_EXPR
7652 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7653 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7654 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7655 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7656 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7657 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7658 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7660 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7661 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7662 || flag_syntax_only))
7663 tem = build1_loc (loc, code, type,
7665 TREE_TYPE (TREE_OPERAND
7666 (TREE_OPERAND (tem, 1), 0)),
7667 TREE_OPERAND (tem, 0),
7668 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7669 TREE_OPERAND (TREE_OPERAND (tem, 2),
7673 else if (COMPARISON_CLASS_P (arg0))
7675 if (TREE_CODE (type) == BOOLEAN_TYPE)
7677 arg0 = copy_node (arg0);
7678 TREE_TYPE (arg0) = type;
7681 else if (TREE_CODE (type) != INTEGER_TYPE)
7682 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7683 fold_build1_loc (loc, code, type,
7685 fold_build1_loc (loc, code, type,
7686 integer_zero_node));
7693 /* Re-association barriers around constants and other re-association
7694 barriers can be removed. */
7695 if (CONSTANT_CLASS_P (op0)
7696 || TREE_CODE (op0) == PAREN_EXPR)
7697 return fold_convert_loc (loc, type, op0);
7702 case FIX_TRUNC_EXPR:
7703 if (TREE_TYPE (op0) == type)
7706 /* If we have (type) (a CMP b) and type is an integral type, return
7707 new expression involving the new type. */
7708 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7709 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7710 TREE_OPERAND (op0, 1));
7712 /* Handle cases of two conversions in a row. */
7713 if (CONVERT_EXPR_P (op0))
7715 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7716 tree inter_type = TREE_TYPE (op0);
7717 int inside_int = INTEGRAL_TYPE_P (inside_type);
7718 int inside_ptr = POINTER_TYPE_P (inside_type);
7719 int inside_float = FLOAT_TYPE_P (inside_type);
7720 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7721 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7722 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7723 int inter_int = INTEGRAL_TYPE_P (inter_type);
7724 int inter_ptr = POINTER_TYPE_P (inter_type);
7725 int inter_float = FLOAT_TYPE_P (inter_type);
7726 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7727 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7728 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7729 int final_int = INTEGRAL_TYPE_P (type);
7730 int final_ptr = POINTER_TYPE_P (type);
7731 int final_float = FLOAT_TYPE_P (type);
7732 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7733 unsigned int final_prec = TYPE_PRECISION (type);
7734 int final_unsignedp = TYPE_UNSIGNED (type);
7736 /* In addition to the cases of two conversions in a row
7737 handled below, if we are converting something to its own
7738 type via an object of identical or wider precision, neither
7739 conversion is needed. */
7740 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7741 && (((inter_int || inter_ptr) && final_int)
7742 || (inter_float && final_float))
7743 && inter_prec >= final_prec)
7744 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7746 /* Likewise, if the intermediate and initial types are either both
7747 float or both integer, we don't need the middle conversion if the
7748 former is wider than the latter and doesn't change the signedness
7749 (for integers). Avoid this if the final type is a pointer since
7750 then we sometimes need the middle conversion. Likewise if the
7751 final type has a precision not equal to the size of its mode. */
7752 if (((inter_int && inside_int)
7753 || (inter_float && inside_float)
7754 || (inter_vec && inside_vec))
7755 && inter_prec >= inside_prec
7756 && (inter_float || inter_vec
7757 || inter_unsignedp == inside_unsignedp)
7758 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7759 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7761 && (! final_vec || inter_prec == inside_prec))
7762 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7764 /* If we have a sign-extension of a zero-extended value, we can
7765 replace that by a single zero-extension. */
7766 if (inside_int && inter_int && final_int
7767 && inside_prec < inter_prec && inter_prec < final_prec
7768 && inside_unsignedp && !inter_unsignedp)
7769 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7771 /* Two conversions in a row are not needed unless:
7772 - some conversion is floating-point (overstrict for now), or
7773 - some conversion is a vector (overstrict for now), or
7774 - the intermediate type is narrower than both initial and
7776 - the intermediate type and innermost type differ in signedness,
7777 and the outermost type is wider than the intermediate, or
7778 - the initial type is a pointer type and the precisions of the
7779 intermediate and final types differ, or
7780 - the final type is a pointer type and the precisions of the
7781 initial and intermediate types differ. */
7782 if (! inside_float && ! inter_float && ! final_float
7783 && ! inside_vec && ! inter_vec && ! final_vec
7784 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7785 && ! (inside_int && inter_int
7786 && inter_unsignedp != inside_unsignedp
7787 && inter_prec < final_prec)
7788 && ((inter_unsignedp && inter_prec > inside_prec)
7789 == (final_unsignedp && final_prec > inter_prec))
7790 && ! (inside_ptr && inter_prec != final_prec)
7791 && ! (final_ptr && inside_prec != inter_prec)
7792 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7793 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7794 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7797 /* Handle (T *)&A.B.C for A being of type T and B and C
7798 living at offset zero. This occurs frequently in
7799 C++ upcasting and then accessing the base. */
7800 if (TREE_CODE (op0) == ADDR_EXPR
7801 && POINTER_TYPE_P (type)
7802 && handled_component_p (TREE_OPERAND (op0, 0)))
7804 HOST_WIDE_INT bitsize, bitpos;
7806 enum machine_mode mode;
7807 int unsignedp, volatilep;
7808 tree base = TREE_OPERAND (op0, 0);
7809 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7810 &mode, &unsignedp, &volatilep, false);
7811 /* If the reference was to a (constant) zero offset, we can use
7812 the address of the base if it has the same base type
7813 as the result type and the pointer type is unqualified. */
7814 if (! offset && bitpos == 0
7815 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7816 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7817 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7818 return fold_convert_loc (loc, type,
7819 build_fold_addr_expr_loc (loc, base));
7822 if (TREE_CODE (op0) == MODIFY_EXPR
7823 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7824 /* Detect assigning a bitfield. */
7825 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7827 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7829 /* Don't leave an assignment inside a conversion
7830 unless assigning a bitfield. */
7831 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7832 /* First do the assignment, then return converted constant. */
7833 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7834 TREE_NO_WARNING (tem) = 1;
7835 TREE_USED (tem) = 1;
7839 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7840 constants (if x has signed type, the sign bit cannot be set
7841 in c). This folds extension into the BIT_AND_EXPR.
7842 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7843 very likely don't have maximal range for their precision and this
7844 transformation effectively doesn't preserve non-maximal ranges. */
7845 if (TREE_CODE (type) == INTEGER_TYPE
7846 && TREE_CODE (op0) == BIT_AND_EXPR
7847 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7849 tree and_expr = op0;
7850 tree and0 = TREE_OPERAND (and_expr, 0);
7851 tree and1 = TREE_OPERAND (and_expr, 1);
7854 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7855 || (TYPE_PRECISION (type)
7856 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7858 else if (TYPE_PRECISION (TREE_TYPE (and1))
7859 <= HOST_BITS_PER_WIDE_INT
7860 && host_integerp (and1, 1))
7862 unsigned HOST_WIDE_INT cst;
7864 cst = tree_low_cst (and1, 1);
7865 cst &= (HOST_WIDE_INT) -1
7866 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7867 change = (cst == 0);
7868 #ifdef LOAD_EXTEND_OP
7870 && !flag_syntax_only
7871 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7874 tree uns = unsigned_type_for (TREE_TYPE (and0));
7875 and0 = fold_convert_loc (loc, uns, and0);
7876 and1 = fold_convert_loc (loc, uns, and1);
7882 tem = force_fit_type_double (type, tree_to_double_int (and1),
7883 0, TREE_OVERFLOW (and1));
7884 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7885 fold_convert_loc (loc, type, and0), tem);
7889 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7890 when one of the new casts will fold away. Conservatively we assume
7891 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7892 if (POINTER_TYPE_P (type)
7893 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7894 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7895 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7896 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7898 tree arg00 = TREE_OPERAND (arg0, 0);
7899 tree arg01 = TREE_OPERAND (arg0, 1);
7901 return fold_build2_loc (loc,
7902 TREE_CODE (arg0), type,
7903 fold_convert_loc (loc, type, arg00),
7904 fold_convert_loc (loc, sizetype, arg01));
7907 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7908 of the same precision, and X is an integer type not narrower than
7909 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7910 if (INTEGRAL_TYPE_P (type)
7911 && TREE_CODE (op0) == BIT_NOT_EXPR
7912 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7913 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7914 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7916 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7917 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7918 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7919 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7920 fold_convert_loc (loc, type, tem));
7923 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7924 type of X and Y (integer types only). */
7925 if (INTEGRAL_TYPE_P (type)
7926 && TREE_CODE (op0) == MULT_EXPR
7927 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7928 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7930 /* Be careful not to introduce new overflows. */
7932 if (TYPE_OVERFLOW_WRAPS (type))
7935 mult_type = unsigned_type_for (type);
7937 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7939 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7940 fold_convert_loc (loc, mult_type,
7941 TREE_OPERAND (op0, 0)),
7942 fold_convert_loc (loc, mult_type,
7943 TREE_OPERAND (op0, 1)));
7944 return fold_convert_loc (loc, type, tem);
7948 tem = fold_convert_const (code, type, op0);
7949 return tem ? tem : NULL_TREE;
7951 case ADDR_SPACE_CONVERT_EXPR:
7952 if (integer_zerop (arg0))
7953 return fold_convert_const (code, type, arg0);
7956 case FIXED_CONVERT_EXPR:
7957 tem = fold_convert_const (code, type, arg0);
7958 return tem ? tem : NULL_TREE;
7960 case VIEW_CONVERT_EXPR:
7961 if (TREE_TYPE (op0) == type)
7963 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7964 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7965 type, TREE_OPERAND (op0, 0));
7966 if (TREE_CODE (op0) == MEM_REF)
7967 return fold_build2_loc (loc, MEM_REF, type,
7968 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7970 /* For integral conversions with the same precision or pointer
7971 conversions use a NOP_EXPR instead. */
7972 if ((INTEGRAL_TYPE_P (type)
7973 || POINTER_TYPE_P (type))
7974 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7975 || POINTER_TYPE_P (TREE_TYPE (op0)))
7976 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7977 return fold_convert_loc (loc, type, op0);
7979 /* Strip inner integral conversions that do not change the precision. */
7980 if (CONVERT_EXPR_P (op0)
7981 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7982 || POINTER_TYPE_P (TREE_TYPE (op0)))
7983 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7984 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7985 && (TYPE_PRECISION (TREE_TYPE (op0))
7986 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7987 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7988 type, TREE_OPERAND (op0, 0));
7990 return fold_view_convert_expr (type, op0);
7993 tem = fold_negate_expr (loc, arg0);
7995 return fold_convert_loc (loc, type, tem);
7999 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8000 return fold_abs_const (arg0, type);
8001 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8002 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8003 /* Convert fabs((double)float) into (double)fabsf(float). */
8004 else if (TREE_CODE (arg0) == NOP_EXPR
8005 && TREE_CODE (type) == REAL_TYPE)
8007 tree targ0 = strip_float_extensions (arg0);
8009 return fold_convert_loc (loc, type,
8010 fold_build1_loc (loc, ABS_EXPR,
8014 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8015 else if (TREE_CODE (arg0) == ABS_EXPR)
8017 else if (tree_expr_nonnegative_p (arg0))
8020 /* Strip sign ops from argument. */
8021 if (TREE_CODE (type) == REAL_TYPE)
8023 tem = fold_strip_sign_ops (arg0);
8025 return fold_build1_loc (loc, ABS_EXPR, type,
8026 fold_convert_loc (loc, type, tem));
8031 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8032 return fold_convert_loc (loc, type, arg0);
8033 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8035 tree itype = TREE_TYPE (type);
8036 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8037 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8038 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8039 negate_expr (ipart));
8041 if (TREE_CODE (arg0) == COMPLEX_CST)
8043 tree itype = TREE_TYPE (type);
8044 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8045 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8046 return build_complex (type, rpart, negate_expr (ipart));
8048 if (TREE_CODE (arg0) == CONJ_EXPR)
8049 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8053 if (TREE_CODE (arg0) == INTEGER_CST)
8054 return fold_not_const (arg0, type);
8055 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8056 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8057 /* Convert ~ (-A) to A - 1. */
8058 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8059 return fold_build2_loc (loc, MINUS_EXPR, type,
8060 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8061 build_int_cst (type, 1));
8062 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8063 else if (INTEGRAL_TYPE_P (type)
8064 && ((TREE_CODE (arg0) == MINUS_EXPR
8065 && integer_onep (TREE_OPERAND (arg0, 1)))
8066 || (TREE_CODE (arg0) == PLUS_EXPR
8067 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8068 return fold_build1_loc (loc, NEGATE_EXPR, type,
8069 fold_convert_loc (loc, type,
8070 TREE_OPERAND (arg0, 0)));
8071 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8072 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8073 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8074 fold_convert_loc (loc, type,
8075 TREE_OPERAND (arg0, 0)))))
8076 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8077 fold_convert_loc (loc, type,
8078 TREE_OPERAND (arg0, 1)));
8079 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8080 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8081 fold_convert_loc (loc, type,
8082 TREE_OPERAND (arg0, 1)))))
8083 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8084 fold_convert_loc (loc, type,
8085 TREE_OPERAND (arg0, 0)), tem);
8086 /* Perform BIT_NOT_EXPR on each element individually. */
8087 else if (TREE_CODE (arg0) == VECTOR_CST)
8089 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8090 int count = TYPE_VECTOR_SUBPARTS (type), i;
8092 for (i = 0; i < count; i++)
8096 elem = TREE_VALUE (elements);
8097 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8098 if (elem == NULL_TREE)
8100 elements = TREE_CHAIN (elements);
8103 elem = build_int_cst (TREE_TYPE (type), -1);
8104 list = tree_cons (NULL_TREE, elem, list);
8107 return build_vector (type, nreverse (list));
8112 case TRUTH_NOT_EXPR:
8113 /* The argument to invert_truthvalue must have Boolean type. */
8114 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8115 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8117 /* Note that the operand of this must be an int
8118 and its values must be 0 or 1.
8119 ("true" is a fixed value perhaps depending on the language,
8120 but we don't handle values other than 1 correctly yet.) */
8121 tem = fold_truth_not_expr (loc, arg0);
8124 return fold_convert_loc (loc, type, tem);
8127 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8128 return fold_convert_loc (loc, type, arg0);
8129 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8130 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8131 TREE_OPERAND (arg0, 1));
8132 if (TREE_CODE (arg0) == COMPLEX_CST)
8133 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8134 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8136 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8137 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8138 fold_build1_loc (loc, REALPART_EXPR, itype,
8139 TREE_OPERAND (arg0, 0)),
8140 fold_build1_loc (loc, REALPART_EXPR, itype,
8141 TREE_OPERAND (arg0, 1)));
8142 return fold_convert_loc (loc, type, tem);
8144 if (TREE_CODE (arg0) == CONJ_EXPR)
8146 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8147 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8148 TREE_OPERAND (arg0, 0));
8149 return fold_convert_loc (loc, type, tem);
8151 if (TREE_CODE (arg0) == CALL_EXPR)
8153 tree fn = get_callee_fndecl (arg0);
8154 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8155 switch (DECL_FUNCTION_CODE (fn))
8157 CASE_FLT_FN (BUILT_IN_CEXPI):
8158 fn = mathfn_built_in (type, BUILT_IN_COS);
8160 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8170 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8171 return build_zero_cst (type);
8172 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8173 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8174 TREE_OPERAND (arg0, 0));
8175 if (TREE_CODE (arg0) == COMPLEX_CST)
8176 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8177 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8179 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8180 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8181 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8182 TREE_OPERAND (arg0, 0)),
8183 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8184 TREE_OPERAND (arg0, 1)));
8185 return fold_convert_loc (loc, type, tem);
8187 if (TREE_CODE (arg0) == CONJ_EXPR)
8189 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8190 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8191 return fold_convert_loc (loc, type, negate_expr (tem));
8193 if (TREE_CODE (arg0) == CALL_EXPR)
8195 tree fn = get_callee_fndecl (arg0);
8196 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8197 switch (DECL_FUNCTION_CODE (fn))
8199 CASE_FLT_FN (BUILT_IN_CEXPI):
8200 fn = mathfn_built_in (type, BUILT_IN_SIN);
8202 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8212 /* Fold *&X to X if X is an lvalue. */
8213 if (TREE_CODE (op0) == ADDR_EXPR)
8215 tree op00 = TREE_OPERAND (op0, 0);
8216 if ((TREE_CODE (op00) == VAR_DECL
8217 || TREE_CODE (op00) == PARM_DECL
8218 || TREE_CODE (op00) == RESULT_DECL)
8219 && !TREE_READONLY (op00))
8226 } /* switch (code) */
8230 /* If the operation was a conversion do _not_ mark a resulting constant
8231 with TREE_OVERFLOW if the original constant was not. These conversions
8232 have implementation defined behavior and retaining the TREE_OVERFLOW
8233 flag here would confuse later passes such as VRP. */
8235 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8236 tree type, tree op0)
8238 tree res = fold_unary_loc (loc, code, type, op0);
8240 && TREE_CODE (res) == INTEGER_CST
8241 && TREE_CODE (op0) == INTEGER_CST
8242 && CONVERT_EXPR_CODE_P (code))
8243 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8248 /* Fold a binary expression of code CODE and type TYPE with operands
8249 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8250 Return the folded expression if folding is successful. Otherwise,
8251 return NULL_TREE. */
8254 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8256 enum tree_code compl_code;
8258 if (code == MIN_EXPR)
8259 compl_code = MAX_EXPR;
8260 else if (code == MAX_EXPR)
8261 compl_code = MIN_EXPR;
8265 /* MIN (MAX (a, b), b) == b. */
8266 if (TREE_CODE (op0) == compl_code
8267 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8268 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8270 /* MIN (MAX (b, a), b) == b. */
8271 if (TREE_CODE (op0) == compl_code
8272 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8273 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8274 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8276 /* MIN (a, MAX (a, b)) == a. */
8277 if (TREE_CODE (op1) == compl_code
8278 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8279 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8280 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8282 /* MIN (a, MAX (b, a)) == a. */
8283 if (TREE_CODE (op1) == compl_code
8284 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8285 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8286 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8291 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8292 by changing CODE to reduce the magnitude of constants involved in
8293 ARG0 of the comparison.
8294 Returns a canonicalized comparison tree if a simplification was
8295 possible, otherwise returns NULL_TREE.
8296 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8297 valid if signed overflow is undefined. */
8300 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8301 tree arg0, tree arg1,
8302 bool *strict_overflow_p)
8304 enum tree_code code0 = TREE_CODE (arg0);
8305 tree t, cst0 = NULL_TREE;
8309 /* Match A +- CST code arg1 and CST code arg1. We can change the
8310 first form only if overflow is undefined. */
8311 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8312 /* In principle pointers also have undefined overflow behavior,
8313 but that causes problems elsewhere. */
8314 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8315 && (code0 == MINUS_EXPR
8316 || code0 == PLUS_EXPR)
8317 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8318 || code0 == INTEGER_CST))
8321 /* Identify the constant in arg0 and its sign. */
8322 if (code0 == INTEGER_CST)
8325 cst0 = TREE_OPERAND (arg0, 1);
8326 sgn0 = tree_int_cst_sgn (cst0);
8328 /* Overflowed constants and zero will cause problems. */
8329 if (integer_zerop (cst0)
8330 || TREE_OVERFLOW (cst0))
8333 /* See if we can reduce the magnitude of the constant in
8334 arg0 by changing the comparison code. */
8335 if (code0 == INTEGER_CST)
8337 /* CST <= arg1 -> CST-1 < arg1. */
8338 if (code == LE_EXPR && sgn0 == 1)
8340 /* -CST < arg1 -> -CST-1 <= arg1. */
8341 else if (code == LT_EXPR && sgn0 == -1)
8343 /* CST > arg1 -> CST-1 >= arg1. */
8344 else if (code == GT_EXPR && sgn0 == 1)
8346 /* -CST >= arg1 -> -CST-1 > arg1. */
8347 else if (code == GE_EXPR && sgn0 == -1)
8351 /* arg1 code' CST' might be more canonical. */
8356 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8358 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8360 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8361 else if (code == GT_EXPR
8362 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8364 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8365 else if (code == LE_EXPR
8366 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8368 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8369 else if (code == GE_EXPR
8370 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8374 *strict_overflow_p = true;
8377 /* Now build the constant reduced in magnitude. But not if that
8378 would produce one outside of its types range. */
8379 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8381 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8382 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8384 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8385 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8386 /* We cannot swap the comparison here as that would cause us to
8387 endlessly recurse. */
8390 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8391 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8392 if (code0 != INTEGER_CST)
8393 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8395 /* If swapping might yield to a more canonical form, do so. */
8397 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8399 return fold_build2_loc (loc, code, type, t, arg1);
8402 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8403 overflow further. Try to decrease the magnitude of constants involved
8404 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8405 and put sole constants at the second argument position.
8406 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8409 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8410 tree arg0, tree arg1)
8413 bool strict_overflow_p;
8414 const char * const warnmsg = G_("assuming signed overflow does not occur "
8415 "when reducing constant in comparison");
8417 /* Try canonicalization by simplifying arg0. */
8418 strict_overflow_p = false;
8419 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8420 &strict_overflow_p);
8423 if (strict_overflow_p)
8424 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8428 /* Try canonicalization by simplifying arg1 using the swapped
8430 code = swap_tree_comparison (code);
8431 strict_overflow_p = false;
8432 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8433 &strict_overflow_p);
8434 if (t && strict_overflow_p)
8435 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8439 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8440 space. This is used to avoid issuing overflow warnings for
8441 expressions like &p->x which can not wrap. */
8444 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8446 unsigned HOST_WIDE_INT offset_low, total_low;
8447 HOST_WIDE_INT size, offset_high, total_high;
8449 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8455 if (offset == NULL_TREE)
8460 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8464 offset_low = TREE_INT_CST_LOW (offset);
8465 offset_high = TREE_INT_CST_HIGH (offset);
8468 if (add_double_with_sign (offset_low, offset_high,
8469 bitpos / BITS_PER_UNIT, 0,
8470 &total_low, &total_high,
8474 if (total_high != 0)
8477 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8481 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8483 if (TREE_CODE (base) == ADDR_EXPR)
8485 HOST_WIDE_INT base_size;
8487 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8488 if (base_size > 0 && size < base_size)
8492 return total_low > (unsigned HOST_WIDE_INT) size;
8495 /* Subroutine of fold_binary. This routine performs all of the
8496 transformations that are common to the equality/inequality
8497 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8498 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8499 fold_binary should call fold_binary. Fold a comparison with
8500 tree code CODE and type TYPE with operands OP0 and OP1. Return
8501 the folded comparison or NULL_TREE. */
8504 fold_comparison (location_t loc, enum tree_code code, tree type,
8507 tree arg0, arg1, tem;
8512 STRIP_SIGN_NOPS (arg0);
8513 STRIP_SIGN_NOPS (arg1);
8515 tem = fold_relational_const (code, type, arg0, arg1);
8516 if (tem != NULL_TREE)
8519 /* If one arg is a real or integer constant, put it last. */
8520 if (tree_swap_operands_p (arg0, arg1, true))
8521 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8523 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8524 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8525 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8526 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8527 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8528 && (TREE_CODE (arg1) == INTEGER_CST
8529 && !TREE_OVERFLOW (arg1)))
8531 tree const1 = TREE_OPERAND (arg0, 1);
8533 tree variable = TREE_OPERAND (arg0, 0);
8536 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8538 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8539 TREE_TYPE (arg1), const2, const1);
8541 /* If the constant operation overflowed this can be
8542 simplified as a comparison against INT_MAX/INT_MIN. */
8543 if (TREE_CODE (lhs) == INTEGER_CST
8544 && TREE_OVERFLOW (lhs))
8546 int const1_sgn = tree_int_cst_sgn (const1);
8547 enum tree_code code2 = code;
8549 /* Get the sign of the constant on the lhs if the
8550 operation were VARIABLE + CONST1. */
8551 if (TREE_CODE (arg0) == MINUS_EXPR)
8552 const1_sgn = -const1_sgn;
8554 /* The sign of the constant determines if we overflowed
8555 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8556 Canonicalize to the INT_MIN overflow by swapping the comparison
8558 if (const1_sgn == -1)
8559 code2 = swap_tree_comparison (code);
8561 /* We now can look at the canonicalized case
8562 VARIABLE + 1 CODE2 INT_MIN
8563 and decide on the result. */
8564 if (code2 == LT_EXPR
8566 || code2 == EQ_EXPR)
8567 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8568 else if (code2 == NE_EXPR
8570 || code2 == GT_EXPR)
8571 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8574 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8575 && (TREE_CODE (lhs) != INTEGER_CST
8576 || !TREE_OVERFLOW (lhs)))
8578 if (code != EQ_EXPR && code != NE_EXPR)
8579 fold_overflow_warning ("assuming signed overflow does not occur "
8580 "when changing X +- C1 cmp C2 to "
8582 WARN_STRICT_OVERFLOW_COMPARISON);
8583 return fold_build2_loc (loc, code, type, variable, lhs);
8587 /* For comparisons of pointers we can decompose it to a compile time
8588 comparison of the base objects and the offsets into the object.
8589 This requires at least one operand being an ADDR_EXPR or a
8590 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8591 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8592 && (TREE_CODE (arg0) == ADDR_EXPR
8593 || TREE_CODE (arg1) == ADDR_EXPR
8594 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8595 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8597 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8598 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8599 enum machine_mode mode;
8600 int volatilep, unsignedp;
8601 bool indirect_base0 = false, indirect_base1 = false;
8603 /* Get base and offset for the access. Strip ADDR_EXPR for
8604 get_inner_reference, but put it back by stripping INDIRECT_REF
8605 off the base object if possible. indirect_baseN will be true
8606 if baseN is not an address but refers to the object itself. */
8608 if (TREE_CODE (arg0) == ADDR_EXPR)
8610 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8611 &bitsize, &bitpos0, &offset0, &mode,
8612 &unsignedp, &volatilep, false);
8613 if (TREE_CODE (base0) == INDIRECT_REF)
8614 base0 = TREE_OPERAND (base0, 0);
8616 indirect_base0 = true;
8618 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8620 base0 = TREE_OPERAND (arg0, 0);
8621 STRIP_SIGN_NOPS (base0);
8622 if (TREE_CODE (base0) == ADDR_EXPR)
8624 base0 = TREE_OPERAND (base0, 0);
8625 indirect_base0 = true;
8627 offset0 = TREE_OPERAND (arg0, 1);
8631 if (TREE_CODE (arg1) == ADDR_EXPR)
8633 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8634 &bitsize, &bitpos1, &offset1, &mode,
8635 &unsignedp, &volatilep, false);
8636 if (TREE_CODE (base1) == INDIRECT_REF)
8637 base1 = TREE_OPERAND (base1, 0);
8639 indirect_base1 = true;
8641 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8643 base1 = TREE_OPERAND (arg1, 0);
8644 STRIP_SIGN_NOPS (base1);
8645 if (TREE_CODE (base1) == ADDR_EXPR)
8647 base1 = TREE_OPERAND (base1, 0);
8648 indirect_base1 = true;
8650 offset1 = TREE_OPERAND (arg1, 1);
8653 /* A local variable can never be pointed to by
8654 the default SSA name of an incoming parameter. */
8655 if ((TREE_CODE (arg0) == ADDR_EXPR
8657 && TREE_CODE (base0) == VAR_DECL
8658 && auto_var_in_fn_p (base0, current_function_decl)
8660 && TREE_CODE (base1) == SSA_NAME
8661 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8662 && SSA_NAME_IS_DEFAULT_DEF (base1))
8663 || (TREE_CODE (arg1) == ADDR_EXPR
8665 && TREE_CODE (base1) == VAR_DECL
8666 && auto_var_in_fn_p (base1, current_function_decl)
8668 && TREE_CODE (base0) == SSA_NAME
8669 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8670 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8672 if (code == NE_EXPR)
8673 return constant_boolean_node (1, type);
8674 else if (code == EQ_EXPR)
8675 return constant_boolean_node (0, type);
8677 /* If we have equivalent bases we might be able to simplify. */
8678 else if (indirect_base0 == indirect_base1
8679 && operand_equal_p (base0, base1, 0))
8681 /* We can fold this expression to a constant if the non-constant
8682 offset parts are equal. */
8683 if ((offset0 == offset1
8684 || (offset0 && offset1
8685 && operand_equal_p (offset0, offset1, 0)))
8688 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8693 && bitpos0 != bitpos1
8694 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8695 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8696 fold_overflow_warning (("assuming pointer wraparound does not "
8697 "occur when comparing P +- C1 with "
8699 WARN_STRICT_OVERFLOW_CONDITIONAL);
8704 return constant_boolean_node (bitpos0 == bitpos1, type);
8706 return constant_boolean_node (bitpos0 != bitpos1, type);
8708 return constant_boolean_node (bitpos0 < bitpos1, type);
8710 return constant_boolean_node (bitpos0 <= bitpos1, type);
8712 return constant_boolean_node (bitpos0 >= bitpos1, type);
8714 return constant_boolean_node (bitpos0 > bitpos1, type);
8718 /* We can simplify the comparison to a comparison of the variable
8719 offset parts if the constant offset parts are equal.
8720 Be careful to use signed size type here because otherwise we
8721 mess with array offsets in the wrong way. This is possible
8722 because pointer arithmetic is restricted to retain within an
8723 object and overflow on pointer differences is undefined as of
8724 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8725 else if (bitpos0 == bitpos1
8726 && ((code == EQ_EXPR || code == NE_EXPR)
8727 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8729 /* By converting to signed size type we cover middle-end pointer
8730 arithmetic which operates on unsigned pointer types of size
8731 type size and ARRAY_REF offsets which are properly sign or
8732 zero extended from their type in case it is narrower than
8734 if (offset0 == NULL_TREE)
8735 offset0 = build_int_cst (ssizetype, 0);
8737 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8738 if (offset1 == NULL_TREE)
8739 offset1 = build_int_cst (ssizetype, 0);
8741 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8745 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8746 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8747 fold_overflow_warning (("assuming pointer wraparound does not "
8748 "occur when comparing P +- C1 with "
8750 WARN_STRICT_OVERFLOW_COMPARISON);
8752 return fold_build2_loc (loc, code, type, offset0, offset1);
8755 /* For non-equal bases we can simplify if they are addresses
8756 of local binding decls or constants. */
8757 else if (indirect_base0 && indirect_base1
8758 /* We know that !operand_equal_p (base0, base1, 0)
8759 because the if condition was false. But make
8760 sure two decls are not the same. */
8762 && TREE_CODE (arg0) == ADDR_EXPR
8763 && TREE_CODE (arg1) == ADDR_EXPR
8764 && (((TREE_CODE (base0) == VAR_DECL
8765 || TREE_CODE (base0) == PARM_DECL)
8766 && (targetm.binds_local_p (base0)
8767 || CONSTANT_CLASS_P (base1)))
8768 || CONSTANT_CLASS_P (base0))
8769 && (((TREE_CODE (base1) == VAR_DECL
8770 || TREE_CODE (base1) == PARM_DECL)
8771 && (targetm.binds_local_p (base1)
8772 || CONSTANT_CLASS_P (base0)))
8773 || CONSTANT_CLASS_P (base1)))
8775 if (code == EQ_EXPR)
8776 return omit_two_operands_loc (loc, type, boolean_false_node,
8778 else if (code == NE_EXPR)
8779 return omit_two_operands_loc (loc, type, boolean_true_node,
8782 /* For equal offsets we can simplify to a comparison of the
8784 else if (bitpos0 == bitpos1
8786 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8788 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8789 && ((offset0 == offset1)
8790 || (offset0 && offset1
8791 && operand_equal_p (offset0, offset1, 0))))
8794 base0 = build_fold_addr_expr_loc (loc, base0);
8796 base1 = build_fold_addr_expr_loc (loc, base1);
8797 return fold_build2_loc (loc, code, type, base0, base1);
8801 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8802 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8803 the resulting offset is smaller in absolute value than the
8805 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8806 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8807 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8808 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8809 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8810 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8811 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8813 tree const1 = TREE_OPERAND (arg0, 1);
8814 tree const2 = TREE_OPERAND (arg1, 1);
8815 tree variable1 = TREE_OPERAND (arg0, 0);
8816 tree variable2 = TREE_OPERAND (arg1, 0);
8818 const char * const warnmsg = G_("assuming signed overflow does not "
8819 "occur when combining constants around "
8822 /* Put the constant on the side where it doesn't overflow and is
8823 of lower absolute value than before. */
8824 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8825 ? MINUS_EXPR : PLUS_EXPR,
8827 if (!TREE_OVERFLOW (cst)
8828 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8830 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8831 return fold_build2_loc (loc, code, type,
8833 fold_build2_loc (loc,
8834 TREE_CODE (arg1), TREE_TYPE (arg1),
8838 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8839 ? MINUS_EXPR : PLUS_EXPR,
8841 if (!TREE_OVERFLOW (cst)
8842 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8844 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8845 return fold_build2_loc (loc, code, type,
8846 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8852 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8853 signed arithmetic case. That form is created by the compiler
8854 often enough for folding it to be of value. One example is in
8855 computing loop trip counts after Operator Strength Reduction. */
8856 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8857 && TREE_CODE (arg0) == MULT_EXPR
8858 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8859 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8860 && integer_zerop (arg1))
8862 tree const1 = TREE_OPERAND (arg0, 1);
8863 tree const2 = arg1; /* zero */
8864 tree variable1 = TREE_OPERAND (arg0, 0);
8865 enum tree_code cmp_code = code;
8867 /* Handle unfolded multiplication by zero. */
8868 if (integer_zerop (const1))
8869 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8871 fold_overflow_warning (("assuming signed overflow does not occur when "
8872 "eliminating multiplication in comparison "
8874 WARN_STRICT_OVERFLOW_COMPARISON);
8876 /* If const1 is negative we swap the sense of the comparison. */
8877 if (tree_int_cst_sgn (const1) < 0)
8878 cmp_code = swap_tree_comparison (cmp_code);
8880 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
8883 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
8887 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8889 tree targ0 = strip_float_extensions (arg0);
8890 tree targ1 = strip_float_extensions (arg1);
8891 tree newtype = TREE_TYPE (targ0);
8893 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8894 newtype = TREE_TYPE (targ1);
8896 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8897 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8898 return fold_build2_loc (loc, code, type,
8899 fold_convert_loc (loc, newtype, targ0),
8900 fold_convert_loc (loc, newtype, targ1));
8902 /* (-a) CMP (-b) -> b CMP a */
8903 if (TREE_CODE (arg0) == NEGATE_EXPR
8904 && TREE_CODE (arg1) == NEGATE_EXPR)
8905 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8906 TREE_OPERAND (arg0, 0));
8908 if (TREE_CODE (arg1) == REAL_CST)
8910 REAL_VALUE_TYPE cst;
8911 cst = TREE_REAL_CST (arg1);
8913 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8914 if (TREE_CODE (arg0) == NEGATE_EXPR)
8915 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8916 TREE_OPERAND (arg0, 0),
8917 build_real (TREE_TYPE (arg1),
8918 real_value_negate (&cst)));
8920 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8921 /* a CMP (-0) -> a CMP 0 */
8922 if (REAL_VALUE_MINUS_ZERO (cst))
8923 return fold_build2_loc (loc, code, type, arg0,
8924 build_real (TREE_TYPE (arg1), dconst0));
8926 /* x != NaN is always true, other ops are always false. */
8927 if (REAL_VALUE_ISNAN (cst)
8928 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8930 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8931 return omit_one_operand_loc (loc, type, tem, arg0);
8934 /* Fold comparisons against infinity. */
8935 if (REAL_VALUE_ISINF (cst)
8936 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
8938 tem = fold_inf_compare (loc, code, type, arg0, arg1);
8939 if (tem != NULL_TREE)
8944 /* If this is a comparison of a real constant with a PLUS_EXPR
8945 or a MINUS_EXPR of a real constant, we can convert it into a
8946 comparison with a revised real constant as long as no overflow
8947 occurs when unsafe_math_optimizations are enabled. */
8948 if (flag_unsafe_math_optimizations
8949 && TREE_CODE (arg1) == REAL_CST
8950 && (TREE_CODE (arg0) == PLUS_EXPR
8951 || TREE_CODE (arg0) == MINUS_EXPR)
8952 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8953 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8954 ? MINUS_EXPR : PLUS_EXPR,
8955 arg1, TREE_OPERAND (arg0, 1)))
8956 && !TREE_OVERFLOW (tem))
8957 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8959 /* Likewise, we can simplify a comparison of a real constant with
8960 a MINUS_EXPR whose first operand is also a real constant, i.e.
8961 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8962 floating-point types only if -fassociative-math is set. */
8963 if (flag_associative_math
8964 && TREE_CODE (arg1) == REAL_CST
8965 && TREE_CODE (arg0) == MINUS_EXPR
8966 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8967 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8969 && !TREE_OVERFLOW (tem))
8970 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8971 TREE_OPERAND (arg0, 1), tem);
8973 /* Fold comparisons against built-in math functions. */
8974 if (TREE_CODE (arg1) == REAL_CST
8975 && flag_unsafe_math_optimizations
8976 && ! flag_errno_math)
8978 enum built_in_function fcode = builtin_mathfn_code (arg0);
8980 if (fcode != END_BUILTINS)
8982 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
8983 if (tem != NULL_TREE)
8989 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8990 && CONVERT_EXPR_P (arg0))
8992 /* If we are widening one operand of an integer comparison,
8993 see if the other operand is similarly being widened. Perhaps we
8994 can do the comparison in the narrower type. */
8995 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
8999 /* Or if we are changing signedness. */
9000 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9005 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9006 constant, we can simplify it. */
9007 if (TREE_CODE (arg1) == INTEGER_CST
9008 && (TREE_CODE (arg0) == MIN_EXPR
9009 || TREE_CODE (arg0) == MAX_EXPR)
9010 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9012 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9017 /* Simplify comparison of something with itself. (For IEEE
9018 floating-point, we can only do some of these simplifications.) */
9019 if (operand_equal_p (arg0, arg1, 0))
9024 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9025 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9026 return constant_boolean_node (1, type);
9031 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9032 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9033 return constant_boolean_node (1, type);
9034 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9037 /* For NE, we can only do this simplification if integer
9038 or we don't honor IEEE floating point NaNs. */
9039 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9040 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9042 /* ... fall through ... */
9045 return constant_boolean_node (0, type);
9051 /* If we are comparing an expression that just has comparisons
9052 of two integer values, arithmetic expressions of those comparisons,
9053 and constants, we can simplify it. There are only three cases
9054 to check: the two values can either be equal, the first can be
9055 greater, or the second can be greater. Fold the expression for
9056 those three values. Since each value must be 0 or 1, we have
9057 eight possibilities, each of which corresponds to the constant 0
9058 or 1 or one of the six possible comparisons.
9060 This handles common cases like (a > b) == 0 but also handles
9061 expressions like ((x > y) - (y > x)) > 0, which supposedly
9062 occur in macroized code. */
9064 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9066 tree cval1 = 0, cval2 = 0;
9069 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9070 /* Don't handle degenerate cases here; they should already
9071 have been handled anyway. */
9072 && cval1 != 0 && cval2 != 0
9073 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9074 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9075 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9076 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9077 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9078 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9079 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9081 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9082 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9084 /* We can't just pass T to eval_subst in case cval1 or cval2
9085 was the same as ARG1. */
9088 = fold_build2_loc (loc, code, type,
9089 eval_subst (loc, arg0, cval1, maxval,
9093 = fold_build2_loc (loc, code, type,
9094 eval_subst (loc, arg0, cval1, maxval,
9098 = fold_build2_loc (loc, code, type,
9099 eval_subst (loc, arg0, cval1, minval,
9103 /* All three of these results should be 0 or 1. Confirm they are.
9104 Then use those values to select the proper code to use. */
9106 if (TREE_CODE (high_result) == INTEGER_CST
9107 && TREE_CODE (equal_result) == INTEGER_CST
9108 && TREE_CODE (low_result) == INTEGER_CST)
9110 /* Make a 3-bit mask with the high-order bit being the
9111 value for `>', the next for '=', and the low for '<'. */
9112 switch ((integer_onep (high_result) * 4)
9113 + (integer_onep (equal_result) * 2)
9114 + integer_onep (low_result))
9118 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9139 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9144 tem = save_expr (build2 (code, type, cval1, cval2));
9145 SET_EXPR_LOCATION (tem, loc);
9148 return fold_build2_loc (loc, code, type, cval1, cval2);
9153 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9154 into a single range test. */
9155 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9156 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9157 && TREE_CODE (arg1) == INTEGER_CST
9158 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9159 && !integer_zerop (TREE_OPERAND (arg0, 1))
9160 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9161 && !TREE_OVERFLOW (arg1))
9163 tem = fold_div_compare (loc, code, type, arg0, arg1);
9164 if (tem != NULL_TREE)
9168 /* Fold ~X op ~Y as Y op X. */
9169 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9170 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9172 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9173 return fold_build2_loc (loc, code, type,
9174 fold_convert_loc (loc, cmp_type,
9175 TREE_OPERAND (arg1, 0)),
9176 TREE_OPERAND (arg0, 0));
9179 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9180 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9181 && TREE_CODE (arg1) == INTEGER_CST)
9183 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9184 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9185 TREE_OPERAND (arg0, 0),
9186 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9187 fold_convert_loc (loc, cmp_type, arg1)));
9194 /* Subroutine of fold_binary. Optimize complex multiplications of the
9195 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9196 argument EXPR represents the expression "z" of type TYPE. */
9199 fold_mult_zconjz (location_t loc, tree type, tree expr)
9201 tree itype = TREE_TYPE (type);
9202 tree rpart, ipart, tem;
9204 if (TREE_CODE (expr) == COMPLEX_EXPR)
9206 rpart = TREE_OPERAND (expr, 0);
9207 ipart = TREE_OPERAND (expr, 1);
9209 else if (TREE_CODE (expr) == COMPLEX_CST)
9211 rpart = TREE_REALPART (expr);
9212 ipart = TREE_IMAGPART (expr);
9216 expr = save_expr (expr);
9217 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9218 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9221 rpart = save_expr (rpart);
9222 ipart = save_expr (ipart);
9223 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9224 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9225 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9226 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9227 build_zero_cst (itype));
9231 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9232 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9233 guarantees that P and N have the same least significant log2(M) bits.
9234 N is not otherwise constrained. In particular, N is not normalized to
9235 0 <= N < M as is common. In general, the precise value of P is unknown.
9236 M is chosen as large as possible such that constant N can be determined.
9238 Returns M and sets *RESIDUE to N.
9240 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9241 account. This is not always possible due to PR 35705.
9244 static unsigned HOST_WIDE_INT
9245 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9246 bool allow_func_align)
9248 enum tree_code code;
9252 code = TREE_CODE (expr);
9253 if (code == ADDR_EXPR)
9255 expr = TREE_OPERAND (expr, 0);
9256 if (handled_component_p (expr))
9258 HOST_WIDE_INT bitsize, bitpos;
9260 enum machine_mode mode;
9261 int unsignedp, volatilep;
9263 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9264 &mode, &unsignedp, &volatilep, false);
9265 *residue = bitpos / BITS_PER_UNIT;
9268 if (TREE_CODE (offset) == INTEGER_CST)
9269 *residue += TREE_INT_CST_LOW (offset);
9271 /* We don't handle more complicated offset expressions. */
9277 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9278 return DECL_ALIGN_UNIT (expr);
9280 else if (code == POINTER_PLUS_EXPR)
9283 unsigned HOST_WIDE_INT modulus;
9284 enum tree_code inner_code;
9286 op0 = TREE_OPERAND (expr, 0);
9288 modulus = get_pointer_modulus_and_residue (op0, residue,
9291 op1 = TREE_OPERAND (expr, 1);
9293 inner_code = TREE_CODE (op1);
9294 if (inner_code == INTEGER_CST)
9296 *residue += TREE_INT_CST_LOW (op1);
9299 else if (inner_code == MULT_EXPR)
9301 op1 = TREE_OPERAND (op1, 1);
9302 if (TREE_CODE (op1) == INTEGER_CST)
9304 unsigned HOST_WIDE_INT align;
9306 /* Compute the greatest power-of-2 divisor of op1. */
9307 align = TREE_INT_CST_LOW (op1);
9310 /* If align is non-zero and less than *modulus, replace
9311 *modulus with align., If align is 0, then either op1 is 0
9312 or the greatest power-of-2 divisor of op1 doesn't fit in an
9313 unsigned HOST_WIDE_INT. In either case, no additional
9314 constraint is imposed. */
9316 modulus = MIN (modulus, align);
9323 /* If we get here, we were unable to determine anything useful about the
9329 /* Fold a binary expression of code CODE and type TYPE with operands
9330 OP0 and OP1. LOC is the location of the resulting expression.
9331 Return the folded expression if folding is successful. Otherwise,
9332 return NULL_TREE. */
9335 fold_binary_loc (location_t loc,
9336 enum tree_code code, tree type, tree op0, tree op1)
9338 enum tree_code_class kind = TREE_CODE_CLASS (code);
9339 tree arg0, arg1, tem;
9340 tree t1 = NULL_TREE;
9341 bool strict_overflow_p;
9343 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9344 && TREE_CODE_LENGTH (code) == 2
9346 && op1 != NULL_TREE);
9351 /* Strip any conversions that don't change the mode. This is
9352 safe for every expression, except for a comparison expression
9353 because its signedness is derived from its operands. So, in
9354 the latter case, only strip conversions that don't change the
9355 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9358 Note that this is done as an internal manipulation within the
9359 constant folder, in order to find the simplest representation
9360 of the arguments so that their form can be studied. In any
9361 cases, the appropriate type conversions should be put back in
9362 the tree that will get out of the constant folder. */
9364 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9366 STRIP_SIGN_NOPS (arg0);
9367 STRIP_SIGN_NOPS (arg1);
9375 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9376 constant but we can't do arithmetic on them. */
9377 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9378 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9379 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9380 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9381 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9382 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9384 if (kind == tcc_binary)
9386 /* Make sure type and arg0 have the same saturating flag. */
9387 gcc_assert (TYPE_SATURATING (type)
9388 == TYPE_SATURATING (TREE_TYPE (arg0)));
9389 tem = const_binop (code, arg0, arg1);
9391 else if (kind == tcc_comparison)
9392 tem = fold_relational_const (code, type, arg0, arg1);
9396 if (tem != NULL_TREE)
9398 if (TREE_TYPE (tem) != type)
9399 tem = fold_convert_loc (loc, type, tem);
9404 /* If this is a commutative operation, and ARG0 is a constant, move it
9405 to ARG1 to reduce the number of tests below. */
9406 if (commutative_tree_code (code)
9407 && tree_swap_operands_p (arg0, arg1, true))
9408 return fold_build2_loc (loc, code, type, op1, op0);
9410 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9412 First check for cases where an arithmetic operation is applied to a
9413 compound, conditional, or comparison operation. Push the arithmetic
9414 operation inside the compound or conditional to see if any folding
9415 can then be done. Convert comparison to conditional for this purpose.
9416 The also optimizes non-constant cases that used to be done in
9419 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9420 one of the operands is a comparison and the other is a comparison, a
9421 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9422 code below would make the expression more complex. Change it to a
9423 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9424 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9426 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9427 || code == EQ_EXPR || code == NE_EXPR)
9428 && ((truth_value_p (TREE_CODE (arg0))
9429 && (truth_value_p (TREE_CODE (arg1))
9430 || (TREE_CODE (arg1) == BIT_AND_EXPR
9431 && integer_onep (TREE_OPERAND (arg1, 1)))))
9432 || (truth_value_p (TREE_CODE (arg1))
9433 && (truth_value_p (TREE_CODE (arg0))
9434 || (TREE_CODE (arg0) == BIT_AND_EXPR
9435 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9437 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9438 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9441 fold_convert_loc (loc, boolean_type_node, arg0),
9442 fold_convert_loc (loc, boolean_type_node, arg1));
9444 if (code == EQ_EXPR)
9445 tem = invert_truthvalue_loc (loc, tem);
9447 return fold_convert_loc (loc, type, tem);
9450 if (TREE_CODE_CLASS (code) == tcc_binary
9451 || TREE_CODE_CLASS (code) == tcc_comparison)
9453 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9455 tem = fold_build2_loc (loc, code, type,
9456 fold_convert_loc (loc, TREE_TYPE (op0),
9457 TREE_OPERAND (arg0, 1)), op1);
9458 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9461 if (TREE_CODE (arg1) == COMPOUND_EXPR
9462 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9464 tem = fold_build2_loc (loc, code, type, op0,
9465 fold_convert_loc (loc, TREE_TYPE (op1),
9466 TREE_OPERAND (arg1, 1)));
9467 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9471 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9473 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9475 /*cond_first_p=*/1);
9476 if (tem != NULL_TREE)
9480 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9482 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9484 /*cond_first_p=*/0);
9485 if (tem != NULL_TREE)
9493 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9494 if (TREE_CODE (arg0) == ADDR_EXPR
9495 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9497 tree iref = TREE_OPERAND (arg0, 0);
9498 return fold_build2 (MEM_REF, type,
9499 TREE_OPERAND (iref, 0),
9500 int_const_binop (PLUS_EXPR, arg1,
9501 TREE_OPERAND (iref, 1), 0));
9504 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9505 if (TREE_CODE (arg0) == ADDR_EXPR
9506 && handled_component_p (TREE_OPERAND (arg0, 0)))
9509 HOST_WIDE_INT coffset;
9510 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9514 return fold_build2 (MEM_REF, type,
9515 build_fold_addr_expr (base),
9516 int_const_binop (PLUS_EXPR, arg1,
9517 size_int (coffset), 0));
9522 case POINTER_PLUS_EXPR:
9523 /* 0 +p index -> (type)index */
9524 if (integer_zerop (arg0))
9525 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9527 /* PTR +p 0 -> PTR */
9528 if (integer_zerop (arg1))
9529 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9531 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9532 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9533 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9534 return fold_convert_loc (loc, type,
9535 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9536 fold_convert_loc (loc, sizetype,
9538 fold_convert_loc (loc, sizetype,
9541 /* index +p PTR -> PTR +p index */
9542 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9543 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9544 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9545 fold_convert_loc (loc, type, arg1),
9546 fold_convert_loc (loc, sizetype, arg0));
9548 /* (PTR +p B) +p A -> PTR +p (B + A) */
9549 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9552 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9553 tree arg00 = TREE_OPERAND (arg0, 0);
9554 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9555 arg01, fold_convert_loc (loc, sizetype, arg1));
9556 return fold_convert_loc (loc, type,
9557 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9562 /* PTR_CST +p CST -> CST1 */
9563 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9564 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9565 fold_convert_loc (loc, type, arg1));
9567 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9568 of the array. Loop optimizer sometimes produce this type of
9570 if (TREE_CODE (arg0) == ADDR_EXPR)
9572 tem = try_move_mult_to_index (loc, arg0,
9573 fold_convert_loc (loc, sizetype, arg1));
9575 return fold_convert_loc (loc, type, tem);
9581 /* A + (-B) -> A - B */
9582 if (TREE_CODE (arg1) == NEGATE_EXPR)
9583 return fold_build2_loc (loc, MINUS_EXPR, type,
9584 fold_convert_loc (loc, type, arg0),
9585 fold_convert_loc (loc, type,
9586 TREE_OPERAND (arg1, 0)));
9587 /* (-A) + B -> B - A */
9588 if (TREE_CODE (arg0) == NEGATE_EXPR
9589 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9590 return fold_build2_loc (loc, MINUS_EXPR, type,
9591 fold_convert_loc (loc, type, arg1),
9592 fold_convert_loc (loc, type,
9593 TREE_OPERAND (arg0, 0)));
9595 if (INTEGRAL_TYPE_P (type))
9597 /* Convert ~A + 1 to -A. */
9598 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9599 && integer_onep (arg1))
9600 return fold_build1_loc (loc, NEGATE_EXPR, type,
9601 fold_convert_loc (loc, type,
9602 TREE_OPERAND (arg0, 0)));
9605 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9606 && !TYPE_OVERFLOW_TRAPS (type))
9608 tree tem = TREE_OPERAND (arg0, 0);
9611 if (operand_equal_p (tem, arg1, 0))
9613 t1 = build_int_cst_type (type, -1);
9614 return omit_one_operand_loc (loc, type, t1, arg1);
9619 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9620 && !TYPE_OVERFLOW_TRAPS (type))
9622 tree tem = TREE_OPERAND (arg1, 0);
9625 if (operand_equal_p (arg0, tem, 0))
9627 t1 = build_int_cst_type (type, -1);
9628 return omit_one_operand_loc (loc, type, t1, arg0);
9632 /* X + (X / CST) * -CST is X % CST. */
9633 if (TREE_CODE (arg1) == MULT_EXPR
9634 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9635 && operand_equal_p (arg0,
9636 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9638 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9639 tree cst1 = TREE_OPERAND (arg1, 1);
9640 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9642 if (sum && integer_zerop (sum))
9643 return fold_convert_loc (loc, type,
9644 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9645 TREE_TYPE (arg0), arg0,
9650 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9651 same or one. Make sure type is not saturating.
9652 fold_plusminus_mult_expr will re-associate. */
9653 if ((TREE_CODE (arg0) == MULT_EXPR
9654 || TREE_CODE (arg1) == MULT_EXPR)
9655 && !TYPE_SATURATING (type)
9656 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9658 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9663 if (! FLOAT_TYPE_P (type))
9665 if (integer_zerop (arg1))
9666 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9668 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9669 with a constant, and the two constants have no bits in common,
9670 we should treat this as a BIT_IOR_EXPR since this may produce more
9672 if (TREE_CODE (arg0) == BIT_AND_EXPR
9673 && TREE_CODE (arg1) == BIT_AND_EXPR
9674 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9675 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9676 && integer_zerop (const_binop (BIT_AND_EXPR,
9677 TREE_OPERAND (arg0, 1),
9678 TREE_OPERAND (arg1, 1))))
9680 code = BIT_IOR_EXPR;
9684 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9685 (plus (plus (mult) (mult)) (foo)) so that we can
9686 take advantage of the factoring cases below. */
9687 if (((TREE_CODE (arg0) == PLUS_EXPR
9688 || TREE_CODE (arg0) == MINUS_EXPR)
9689 && TREE_CODE (arg1) == MULT_EXPR)
9690 || ((TREE_CODE (arg1) == PLUS_EXPR
9691 || TREE_CODE (arg1) == MINUS_EXPR)
9692 && TREE_CODE (arg0) == MULT_EXPR))
9694 tree parg0, parg1, parg, marg;
9695 enum tree_code pcode;
9697 if (TREE_CODE (arg1) == MULT_EXPR)
9698 parg = arg0, marg = arg1;
9700 parg = arg1, marg = arg0;
9701 pcode = TREE_CODE (parg);
9702 parg0 = TREE_OPERAND (parg, 0);
9703 parg1 = TREE_OPERAND (parg, 1);
9707 if (TREE_CODE (parg0) == MULT_EXPR
9708 && TREE_CODE (parg1) != MULT_EXPR)
9709 return fold_build2_loc (loc, pcode, type,
9710 fold_build2_loc (loc, PLUS_EXPR, type,
9711 fold_convert_loc (loc, type,
9713 fold_convert_loc (loc, type,
9715 fold_convert_loc (loc, type, parg1));
9716 if (TREE_CODE (parg0) != MULT_EXPR
9717 && TREE_CODE (parg1) == MULT_EXPR)
9719 fold_build2_loc (loc, PLUS_EXPR, type,
9720 fold_convert_loc (loc, type, parg0),
9721 fold_build2_loc (loc, pcode, type,
9722 fold_convert_loc (loc, type, marg),
9723 fold_convert_loc (loc, type,
9729 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9730 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9731 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9733 /* Likewise if the operands are reversed. */
9734 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9735 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9737 /* Convert X + -C into X - C. */
9738 if (TREE_CODE (arg1) == REAL_CST
9739 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9741 tem = fold_negate_const (arg1, type);
9742 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9743 return fold_build2_loc (loc, MINUS_EXPR, type,
9744 fold_convert_loc (loc, type, arg0),
9745 fold_convert_loc (loc, type, tem));
9748 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9749 to __complex__ ( x, y ). This is not the same for SNaNs or
9750 if signed zeros are involved. */
9751 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9752 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9753 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9755 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9756 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9757 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9758 bool arg0rz = false, arg0iz = false;
9759 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9760 || (arg0i && (arg0iz = real_zerop (arg0i))))
9762 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9763 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9764 if (arg0rz && arg1i && real_zerop (arg1i))
9766 tree rp = arg1r ? arg1r
9767 : build1 (REALPART_EXPR, rtype, arg1);
9768 tree ip = arg0i ? arg0i
9769 : build1 (IMAGPART_EXPR, rtype, arg0);
9770 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9772 else if (arg0iz && arg1r && real_zerop (arg1r))
9774 tree rp = arg0r ? arg0r
9775 : build1 (REALPART_EXPR, rtype, arg0);
9776 tree ip = arg1i ? arg1i
9777 : build1 (IMAGPART_EXPR, rtype, arg1);
9778 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9783 if (flag_unsafe_math_optimizations
9784 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9785 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9786 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9789 /* Convert x+x into x*2.0. */
9790 if (operand_equal_p (arg0, arg1, 0)
9791 && SCALAR_FLOAT_TYPE_P (type))
9792 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9793 build_real (type, dconst2));
9795 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9796 We associate floats only if the user has specified
9797 -fassociative-math. */
9798 if (flag_associative_math
9799 && TREE_CODE (arg1) == PLUS_EXPR
9800 && TREE_CODE (arg0) != MULT_EXPR)
9802 tree tree10 = TREE_OPERAND (arg1, 0);
9803 tree tree11 = TREE_OPERAND (arg1, 1);
9804 if (TREE_CODE (tree11) == MULT_EXPR
9805 && TREE_CODE (tree10) == MULT_EXPR)
9808 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9809 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9812 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9813 We associate floats only if the user has specified
9814 -fassociative-math. */
9815 if (flag_associative_math
9816 && TREE_CODE (arg0) == PLUS_EXPR
9817 && TREE_CODE (arg1) != MULT_EXPR)
9819 tree tree00 = TREE_OPERAND (arg0, 0);
9820 tree tree01 = TREE_OPERAND (arg0, 1);
9821 if (TREE_CODE (tree01) == MULT_EXPR
9822 && TREE_CODE (tree00) == MULT_EXPR)
9825 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9826 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9832 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9833 is a rotate of A by C1 bits. */
9834 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9835 is a rotate of A by B bits. */
9837 enum tree_code code0, code1;
9839 code0 = TREE_CODE (arg0);
9840 code1 = TREE_CODE (arg1);
9841 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9842 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9843 && operand_equal_p (TREE_OPERAND (arg0, 0),
9844 TREE_OPERAND (arg1, 0), 0)
9845 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9846 TYPE_UNSIGNED (rtype))
9847 /* Only create rotates in complete modes. Other cases are not
9848 expanded properly. */
9849 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9851 tree tree01, tree11;
9852 enum tree_code code01, code11;
9854 tree01 = TREE_OPERAND (arg0, 1);
9855 tree11 = TREE_OPERAND (arg1, 1);
9856 STRIP_NOPS (tree01);
9857 STRIP_NOPS (tree11);
9858 code01 = TREE_CODE (tree01);
9859 code11 = TREE_CODE (tree11);
9860 if (code01 == INTEGER_CST
9861 && code11 == INTEGER_CST
9862 && TREE_INT_CST_HIGH (tree01) == 0
9863 && TREE_INT_CST_HIGH (tree11) == 0
9864 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9865 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9867 tem = build2_loc (loc, LROTATE_EXPR,
9868 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9869 TREE_OPERAND (arg0, 0),
9870 code0 == LSHIFT_EXPR ? tree01 : tree11);
9871 return fold_convert_loc (loc, type, tem);
9873 else if (code11 == MINUS_EXPR)
9875 tree tree110, tree111;
9876 tree110 = TREE_OPERAND (tree11, 0);
9877 tree111 = TREE_OPERAND (tree11, 1);
9878 STRIP_NOPS (tree110);
9879 STRIP_NOPS (tree111);
9880 if (TREE_CODE (tree110) == INTEGER_CST
9881 && 0 == compare_tree_int (tree110,
9883 (TREE_TYPE (TREE_OPERAND
9885 && operand_equal_p (tree01, tree111, 0))
9887 fold_convert_loc (loc, type,
9888 build2 ((code0 == LSHIFT_EXPR
9891 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9892 TREE_OPERAND (arg0, 0), tree01));
9894 else if (code01 == MINUS_EXPR)
9896 tree tree010, tree011;
9897 tree010 = TREE_OPERAND (tree01, 0);
9898 tree011 = TREE_OPERAND (tree01, 1);
9899 STRIP_NOPS (tree010);
9900 STRIP_NOPS (tree011);
9901 if (TREE_CODE (tree010) == INTEGER_CST
9902 && 0 == compare_tree_int (tree010,
9904 (TREE_TYPE (TREE_OPERAND
9906 && operand_equal_p (tree11, tree011, 0))
9907 return fold_convert_loc
9909 build2 ((code0 != LSHIFT_EXPR
9912 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9913 TREE_OPERAND (arg0, 0), tree11));
9919 /* In most languages, can't associate operations on floats through
9920 parentheses. Rather than remember where the parentheses were, we
9921 don't associate floats at all, unless the user has specified
9923 And, we need to make sure type is not saturating. */
9925 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9926 && !TYPE_SATURATING (type))
9928 tree var0, con0, lit0, minus_lit0;
9929 tree var1, con1, lit1, minus_lit1;
9932 /* Split both trees into variables, constants, and literals. Then
9933 associate each group together, the constants with literals,
9934 then the result with variables. This increases the chances of
9935 literals being recombined later and of generating relocatable
9936 expressions for the sum of a constant and literal. */
9937 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9938 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9939 code == MINUS_EXPR);
9941 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9942 if (code == MINUS_EXPR)
9945 /* With undefined overflow we can only associate constants with one
9946 variable, and constants whose association doesn't overflow. */
9947 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9948 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9955 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9956 tmp0 = TREE_OPERAND (tmp0, 0);
9957 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9958 tmp1 = TREE_OPERAND (tmp1, 0);
9959 /* The only case we can still associate with two variables
9960 is if they are the same, modulo negation. */
9961 if (!operand_equal_p (tmp0, tmp1, 0))
9965 if (ok && lit0 && lit1)
9967 tree tmp0 = fold_convert (type, lit0);
9968 tree tmp1 = fold_convert (type, lit1);
9970 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
9971 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
9976 /* Only do something if we found more than two objects. Otherwise,
9977 nothing has changed and we risk infinite recursion. */
9979 && (2 < ((var0 != 0) + (var1 != 0)
9980 + (con0 != 0) + (con1 != 0)
9981 + (lit0 != 0) + (lit1 != 0)
9982 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9984 var0 = associate_trees (loc, var0, var1, code, type);
9985 con0 = associate_trees (loc, con0, con1, code, type);
9986 lit0 = associate_trees (loc, lit0, lit1, code, type);
9987 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
9989 /* Preserve the MINUS_EXPR if the negative part of the literal is
9990 greater than the positive part. Otherwise, the multiplicative
9991 folding code (i.e extract_muldiv) may be fooled in case
9992 unsigned constants are subtracted, like in the following
9993 example: ((X*2 + 4) - 8U)/2. */
9994 if (minus_lit0 && lit0)
9996 if (TREE_CODE (lit0) == INTEGER_CST
9997 && TREE_CODE (minus_lit0) == INTEGER_CST
9998 && tree_int_cst_lt (lit0, minus_lit0))
10000 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10006 lit0 = associate_trees (loc, lit0, minus_lit0,
10015 fold_convert_loc (loc, type,
10016 associate_trees (loc, var0, minus_lit0,
10017 MINUS_EXPR, type));
10020 con0 = associate_trees (loc, con0, minus_lit0,
10023 fold_convert_loc (loc, type,
10024 associate_trees (loc, var0, con0,
10029 con0 = associate_trees (loc, con0, lit0, code, type);
10031 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10039 /* Pointer simplifications for subtraction, simple reassociations. */
10040 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10042 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10043 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10044 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10046 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10047 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10048 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10049 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10050 return fold_build2_loc (loc, PLUS_EXPR, type,
10051 fold_build2_loc (loc, MINUS_EXPR, type,
10053 fold_build2_loc (loc, MINUS_EXPR, type,
10056 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10057 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10059 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10060 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10061 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10062 fold_convert_loc (loc, type, arg1));
10064 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10067 /* A - (-B) -> A + B */
10068 if (TREE_CODE (arg1) == NEGATE_EXPR)
10069 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10070 fold_convert_loc (loc, type,
10071 TREE_OPERAND (arg1, 0)));
10072 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10073 if (TREE_CODE (arg0) == NEGATE_EXPR
10074 && (FLOAT_TYPE_P (type)
10075 || INTEGRAL_TYPE_P (type))
10076 && negate_expr_p (arg1)
10077 && reorder_operands_p (arg0, arg1))
10078 return fold_build2_loc (loc, MINUS_EXPR, type,
10079 fold_convert_loc (loc, type,
10080 negate_expr (arg1)),
10081 fold_convert_loc (loc, type,
10082 TREE_OPERAND (arg0, 0)));
10083 /* Convert -A - 1 to ~A. */
10084 if (INTEGRAL_TYPE_P (type)
10085 && TREE_CODE (arg0) == NEGATE_EXPR
10086 && integer_onep (arg1)
10087 && !TYPE_OVERFLOW_TRAPS (type))
10088 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10089 fold_convert_loc (loc, type,
10090 TREE_OPERAND (arg0, 0)));
10092 /* Convert -1 - A to ~A. */
10093 if (INTEGRAL_TYPE_P (type)
10094 && integer_all_onesp (arg0))
10095 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10098 /* X - (X / CST) * CST is X % CST. */
10099 if (INTEGRAL_TYPE_P (type)
10100 && TREE_CODE (arg1) == MULT_EXPR
10101 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10102 && operand_equal_p (arg0,
10103 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10104 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10105 TREE_OPERAND (arg1, 1), 0))
10107 fold_convert_loc (loc, type,
10108 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10109 arg0, TREE_OPERAND (arg1, 1)));
10111 if (! FLOAT_TYPE_P (type))
10113 if (integer_zerop (arg0))
10114 return negate_expr (fold_convert_loc (loc, type, arg1));
10115 if (integer_zerop (arg1))
10116 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10118 /* Fold A - (A & B) into ~B & A. */
10119 if (!TREE_SIDE_EFFECTS (arg0)
10120 && TREE_CODE (arg1) == BIT_AND_EXPR)
10122 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10124 tree arg10 = fold_convert_loc (loc, type,
10125 TREE_OPERAND (arg1, 0));
10126 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10127 fold_build1_loc (loc, BIT_NOT_EXPR,
10129 fold_convert_loc (loc, type, arg0));
10131 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10133 tree arg11 = fold_convert_loc (loc,
10134 type, TREE_OPERAND (arg1, 1));
10135 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10136 fold_build1_loc (loc, BIT_NOT_EXPR,
10138 fold_convert_loc (loc, type, arg0));
10142 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10143 any power of 2 minus 1. */
10144 if (TREE_CODE (arg0) == BIT_AND_EXPR
10145 && TREE_CODE (arg1) == BIT_AND_EXPR
10146 && operand_equal_p (TREE_OPERAND (arg0, 0),
10147 TREE_OPERAND (arg1, 0), 0))
10149 tree mask0 = TREE_OPERAND (arg0, 1);
10150 tree mask1 = TREE_OPERAND (arg1, 1);
10151 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10153 if (operand_equal_p (tem, mask1, 0))
10155 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10156 TREE_OPERAND (arg0, 0), mask1);
10157 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10162 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10163 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10164 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10166 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10167 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10168 (-ARG1 + ARG0) reduces to -ARG1. */
10169 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10170 return negate_expr (fold_convert_loc (loc, type, arg1));
10172 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10173 __complex__ ( x, -y ). This is not the same for SNaNs or if
10174 signed zeros are involved. */
10175 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10176 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10177 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10179 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10180 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10181 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10182 bool arg0rz = false, arg0iz = false;
10183 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10184 || (arg0i && (arg0iz = real_zerop (arg0i))))
10186 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10187 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10188 if (arg0rz && arg1i && real_zerop (arg1i))
10190 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10192 : build1 (REALPART_EXPR, rtype, arg1));
10193 tree ip = arg0i ? arg0i
10194 : build1 (IMAGPART_EXPR, rtype, arg0);
10195 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10197 else if (arg0iz && arg1r && real_zerop (arg1r))
10199 tree rp = arg0r ? arg0r
10200 : build1 (REALPART_EXPR, rtype, arg0);
10201 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10203 : build1 (IMAGPART_EXPR, rtype, arg1));
10204 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10209 /* Fold &x - &x. This can happen from &x.foo - &x.
10210 This is unsafe for certain floats even in non-IEEE formats.
10211 In IEEE, it is unsafe because it does wrong for NaNs.
10212 Also note that operand_equal_p is always false if an operand
10215 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10216 && operand_equal_p (arg0, arg1, 0))
10217 return build_zero_cst (type);
10219 /* A - B -> A + (-B) if B is easily negatable. */
10220 if (negate_expr_p (arg1)
10221 && ((FLOAT_TYPE_P (type)
10222 /* Avoid this transformation if B is a positive REAL_CST. */
10223 && (TREE_CODE (arg1) != REAL_CST
10224 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10225 || INTEGRAL_TYPE_P (type)))
10226 return fold_build2_loc (loc, PLUS_EXPR, type,
10227 fold_convert_loc (loc, type, arg0),
10228 fold_convert_loc (loc, type,
10229 negate_expr (arg1)));
10231 /* Try folding difference of addresses. */
10233 HOST_WIDE_INT diff;
10235 if ((TREE_CODE (arg0) == ADDR_EXPR
10236 || TREE_CODE (arg1) == ADDR_EXPR)
10237 && ptr_difference_const (arg0, arg1, &diff))
10238 return build_int_cst_type (type, diff);
10241 /* Fold &a[i] - &a[j] to i-j. */
10242 if (TREE_CODE (arg0) == ADDR_EXPR
10243 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10244 && TREE_CODE (arg1) == ADDR_EXPR
10245 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10247 tree aref0 = TREE_OPERAND (arg0, 0);
10248 tree aref1 = TREE_OPERAND (arg1, 0);
10249 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10250 TREE_OPERAND (aref1, 0), 0))
10252 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10253 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10254 tree esz = array_ref_element_size (aref0);
10255 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10256 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10257 fold_convert_loc (loc, type, esz));
10262 if (FLOAT_TYPE_P (type)
10263 && flag_unsafe_math_optimizations
10264 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10265 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10266 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10269 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10270 same or one. Make sure type is not saturating.
10271 fold_plusminus_mult_expr will re-associate. */
10272 if ((TREE_CODE (arg0) == MULT_EXPR
10273 || TREE_CODE (arg1) == MULT_EXPR)
10274 && !TYPE_SATURATING (type)
10275 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10277 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10285 /* (-A) * (-B) -> A * B */
10286 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10287 return fold_build2_loc (loc, MULT_EXPR, type,
10288 fold_convert_loc (loc, type,
10289 TREE_OPERAND (arg0, 0)),
10290 fold_convert_loc (loc, type,
10291 negate_expr (arg1)));
10292 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10293 return fold_build2_loc (loc, MULT_EXPR, type,
10294 fold_convert_loc (loc, type,
10295 negate_expr (arg0)),
10296 fold_convert_loc (loc, type,
10297 TREE_OPERAND (arg1, 0)));
10299 if (! FLOAT_TYPE_P (type))
10301 if (integer_zerop (arg1))
10302 return omit_one_operand_loc (loc, type, arg1, arg0);
10303 if (integer_onep (arg1))
10304 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10305 /* Transform x * -1 into -x. Make sure to do the negation
10306 on the original operand with conversions not stripped
10307 because we can only strip non-sign-changing conversions. */
10308 if (integer_all_onesp (arg1))
10309 return fold_convert_loc (loc, type, negate_expr (op0));
10310 /* Transform x * -C into -x * C if x is easily negatable. */
10311 if (TREE_CODE (arg1) == INTEGER_CST
10312 && tree_int_cst_sgn (arg1) == -1
10313 && negate_expr_p (arg0)
10314 && (tem = negate_expr (arg1)) != arg1
10315 && !TREE_OVERFLOW (tem))
10316 return fold_build2_loc (loc, MULT_EXPR, type,
10317 fold_convert_loc (loc, type,
10318 negate_expr (arg0)),
10321 /* (a * (1 << b)) is (a << b) */
10322 if (TREE_CODE (arg1) == LSHIFT_EXPR
10323 && integer_onep (TREE_OPERAND (arg1, 0)))
10324 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10325 TREE_OPERAND (arg1, 1));
10326 if (TREE_CODE (arg0) == LSHIFT_EXPR
10327 && integer_onep (TREE_OPERAND (arg0, 0)))
10328 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10329 TREE_OPERAND (arg0, 1));
10331 /* (A + A) * C -> A * 2 * C */
10332 if (TREE_CODE (arg0) == PLUS_EXPR
10333 && TREE_CODE (arg1) == INTEGER_CST
10334 && operand_equal_p (TREE_OPERAND (arg0, 0),
10335 TREE_OPERAND (arg0, 1), 0))
10336 return fold_build2_loc (loc, MULT_EXPR, type,
10337 omit_one_operand_loc (loc, type,
10338 TREE_OPERAND (arg0, 0),
10339 TREE_OPERAND (arg0, 1)),
10340 fold_build2_loc (loc, MULT_EXPR, type,
10341 build_int_cst (type, 2) , arg1));
10343 strict_overflow_p = false;
10344 if (TREE_CODE (arg1) == INTEGER_CST
10345 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10346 &strict_overflow_p)))
10348 if (strict_overflow_p)
10349 fold_overflow_warning (("assuming signed overflow does not "
10350 "occur when simplifying "
10352 WARN_STRICT_OVERFLOW_MISC);
10353 return fold_convert_loc (loc, type, tem);
10356 /* Optimize z * conj(z) for integer complex numbers. */
10357 if (TREE_CODE (arg0) == CONJ_EXPR
10358 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10359 return fold_mult_zconjz (loc, type, arg1);
10360 if (TREE_CODE (arg1) == CONJ_EXPR
10361 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10362 return fold_mult_zconjz (loc, type, arg0);
10366 /* Maybe fold x * 0 to 0. The expressions aren't the same
10367 when x is NaN, since x * 0 is also NaN. Nor are they the
10368 same in modes with signed zeros, since multiplying a
10369 negative value by 0 gives -0, not +0. */
10370 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10371 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10372 && real_zerop (arg1))
10373 return omit_one_operand_loc (loc, type, arg1, arg0);
10374 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10375 Likewise for complex arithmetic with signed zeros. */
10376 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10377 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10378 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10379 && real_onep (arg1))
10380 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10382 /* Transform x * -1.0 into -x. */
10383 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10384 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10385 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10386 && real_minus_onep (arg1))
10387 return fold_convert_loc (loc, type, negate_expr (arg0));
10389 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10390 the result for floating point types due to rounding so it is applied
10391 only if -fassociative-math was specify. */
10392 if (flag_associative_math
10393 && TREE_CODE (arg0) == RDIV_EXPR
10394 && TREE_CODE (arg1) == REAL_CST
10395 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10397 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10400 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10401 TREE_OPERAND (arg0, 1));
10404 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10405 if (operand_equal_p (arg0, arg1, 0))
10407 tree tem = fold_strip_sign_ops (arg0);
10408 if (tem != NULL_TREE)
10410 tem = fold_convert_loc (loc, type, tem);
10411 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10415 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10416 This is not the same for NaNs or if signed zeros are
10418 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10419 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10420 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10421 && TREE_CODE (arg1) == COMPLEX_CST
10422 && real_zerop (TREE_REALPART (arg1)))
10424 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10425 if (real_onep (TREE_IMAGPART (arg1)))
10427 fold_build2_loc (loc, COMPLEX_EXPR, type,
10428 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10430 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10431 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10433 fold_build2_loc (loc, COMPLEX_EXPR, type,
10434 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10435 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10439 /* Optimize z * conj(z) for floating point complex numbers.
10440 Guarded by flag_unsafe_math_optimizations as non-finite
10441 imaginary components don't produce scalar results. */
10442 if (flag_unsafe_math_optimizations
10443 && TREE_CODE (arg0) == CONJ_EXPR
10444 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10445 return fold_mult_zconjz (loc, type, arg1);
10446 if (flag_unsafe_math_optimizations
10447 && TREE_CODE (arg1) == CONJ_EXPR
10448 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10449 return fold_mult_zconjz (loc, type, arg0);
10451 if (flag_unsafe_math_optimizations)
10453 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10454 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10456 /* Optimizations of root(...)*root(...). */
10457 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10460 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10461 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10463 /* Optimize sqrt(x)*sqrt(x) as x. */
10464 if (BUILTIN_SQRT_P (fcode0)
10465 && operand_equal_p (arg00, arg10, 0)
10466 && ! HONOR_SNANS (TYPE_MODE (type)))
10469 /* Optimize root(x)*root(y) as root(x*y). */
10470 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10471 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10472 return build_call_expr_loc (loc, rootfn, 1, arg);
10475 /* Optimize expN(x)*expN(y) as expN(x+y). */
10476 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10478 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10479 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10480 CALL_EXPR_ARG (arg0, 0),
10481 CALL_EXPR_ARG (arg1, 0));
10482 return build_call_expr_loc (loc, expfn, 1, arg);
10485 /* Optimizations of pow(...)*pow(...). */
10486 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10487 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10488 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10490 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10491 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10492 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10493 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10495 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10496 if (operand_equal_p (arg01, arg11, 0))
10498 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10499 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10501 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10504 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10505 if (operand_equal_p (arg00, arg10, 0))
10507 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10508 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10510 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10514 /* Optimize tan(x)*cos(x) as sin(x). */
10515 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10516 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10517 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10518 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10519 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10520 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10521 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10522 CALL_EXPR_ARG (arg1, 0), 0))
10524 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10526 if (sinfn != NULL_TREE)
10527 return build_call_expr_loc (loc, sinfn, 1,
10528 CALL_EXPR_ARG (arg0, 0));
10531 /* Optimize x*pow(x,c) as pow(x,c+1). */
10532 if (fcode1 == BUILT_IN_POW
10533 || fcode1 == BUILT_IN_POWF
10534 || fcode1 == BUILT_IN_POWL)
10536 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10537 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10538 if (TREE_CODE (arg11) == REAL_CST
10539 && !TREE_OVERFLOW (arg11)
10540 && operand_equal_p (arg0, arg10, 0))
10542 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10546 c = TREE_REAL_CST (arg11);
10547 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10548 arg = build_real (type, c);
10549 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10553 /* Optimize pow(x,c)*x as pow(x,c+1). */
10554 if (fcode0 == BUILT_IN_POW
10555 || fcode0 == BUILT_IN_POWF
10556 || fcode0 == BUILT_IN_POWL)
10558 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10559 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10560 if (TREE_CODE (arg01) == REAL_CST
10561 && !TREE_OVERFLOW (arg01)
10562 && operand_equal_p (arg1, arg00, 0))
10564 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10568 c = TREE_REAL_CST (arg01);
10569 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10570 arg = build_real (type, c);
10571 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10575 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10576 if (optimize_function_for_speed_p (cfun)
10577 && operand_equal_p (arg0, arg1, 0))
10579 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10583 tree arg = build_real (type, dconst2);
10584 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10593 if (integer_all_onesp (arg1))
10594 return omit_one_operand_loc (loc, type, arg1, arg0);
10595 if (integer_zerop (arg1))
10596 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10597 if (operand_equal_p (arg0, arg1, 0))
10598 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10600 /* ~X | X is -1. */
10601 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10602 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10604 t1 = build_zero_cst (type);
10605 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10606 return omit_one_operand_loc (loc, type, t1, arg1);
10609 /* X | ~X is -1. */
10610 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10611 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10613 t1 = build_zero_cst (type);
10614 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10615 return omit_one_operand_loc (loc, type, t1, arg0);
10618 /* Canonicalize (X & C1) | C2. */
10619 if (TREE_CODE (arg0) == BIT_AND_EXPR
10620 && TREE_CODE (arg1) == INTEGER_CST
10621 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10623 double_int c1, c2, c3, msk;
10624 int width = TYPE_PRECISION (type), w;
10625 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
10626 c2 = tree_to_double_int (arg1);
10628 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10629 if (double_int_equal_p (double_int_and (c1, c2), c1))
10630 return omit_one_operand_loc (loc, type, arg1,
10631 TREE_OPERAND (arg0, 0));
10633 msk = double_int_mask (width);
10635 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10636 if (double_int_zero_p (double_int_and_not (msk,
10637 double_int_ior (c1, c2))))
10638 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10639 TREE_OPERAND (arg0, 0), arg1);
10641 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10642 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10643 mode which allows further optimizations. */
10644 c1 = double_int_and (c1, msk);
10645 c2 = double_int_and (c2, msk);
10646 c3 = double_int_and_not (c1, c2);
10647 for (w = BITS_PER_UNIT;
10648 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10651 unsigned HOST_WIDE_INT mask
10652 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10653 if (((c1.low | c2.low) & mask) == mask
10654 && (c1.low & ~mask) == 0 && c1.high == 0)
10656 c3 = uhwi_to_double_int (mask);
10660 if (!double_int_equal_p (c3, c1))
10661 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10662 fold_build2_loc (loc, BIT_AND_EXPR, type,
10663 TREE_OPERAND (arg0, 0),
10664 double_int_to_tree (type,
10669 /* (X & Y) | Y is (X, Y). */
10670 if (TREE_CODE (arg0) == BIT_AND_EXPR
10671 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10672 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10673 /* (X & Y) | X is (Y, X). */
10674 if (TREE_CODE (arg0) == BIT_AND_EXPR
10675 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10676 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10677 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10678 /* X | (X & Y) is (Y, X). */
10679 if (TREE_CODE (arg1) == BIT_AND_EXPR
10680 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10681 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10682 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10683 /* X | (Y & X) is (Y, X). */
10684 if (TREE_CODE (arg1) == BIT_AND_EXPR
10685 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10686 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10687 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10689 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10690 if (t1 != NULL_TREE)
10693 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10695 This results in more efficient code for machines without a NAND
10696 instruction. Combine will canonicalize to the first form
10697 which will allow use of NAND instructions provided by the
10698 backend if they exist. */
10699 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10700 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10703 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10704 build2 (BIT_AND_EXPR, type,
10705 fold_convert_loc (loc, type,
10706 TREE_OPERAND (arg0, 0)),
10707 fold_convert_loc (loc, type,
10708 TREE_OPERAND (arg1, 0))));
10711 /* See if this can be simplified into a rotate first. If that
10712 is unsuccessful continue in the association code. */
10716 if (integer_zerop (arg1))
10717 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10718 if (integer_all_onesp (arg1))
10719 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10720 if (operand_equal_p (arg0, arg1, 0))
10721 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10723 /* ~X ^ X is -1. */
10724 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10725 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10727 t1 = build_zero_cst (type);
10728 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10729 return omit_one_operand_loc (loc, type, t1, arg1);
10732 /* X ^ ~X is -1. */
10733 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10734 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10736 t1 = build_zero_cst (type);
10737 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10738 return omit_one_operand_loc (loc, type, t1, arg0);
10741 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10742 with a constant, and the two constants have no bits in common,
10743 we should treat this as a BIT_IOR_EXPR since this may produce more
10744 simplifications. */
10745 if (TREE_CODE (arg0) == BIT_AND_EXPR
10746 && TREE_CODE (arg1) == BIT_AND_EXPR
10747 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10748 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10749 && integer_zerop (const_binop (BIT_AND_EXPR,
10750 TREE_OPERAND (arg0, 1),
10751 TREE_OPERAND (arg1, 1))))
10753 code = BIT_IOR_EXPR;
10757 /* (X | Y) ^ X -> Y & ~ X*/
10758 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10759 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10761 tree t2 = TREE_OPERAND (arg0, 1);
10762 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10764 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10765 fold_convert_loc (loc, type, t2),
10766 fold_convert_loc (loc, type, t1));
10770 /* (Y | X) ^ X -> Y & ~ X*/
10771 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10772 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10774 tree t2 = TREE_OPERAND (arg0, 0);
10775 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10777 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10778 fold_convert_loc (loc, type, t2),
10779 fold_convert_loc (loc, type, t1));
10783 /* X ^ (X | Y) -> Y & ~ X*/
10784 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10785 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10787 tree t2 = TREE_OPERAND (arg1, 1);
10788 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10790 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10791 fold_convert_loc (loc, type, t2),
10792 fold_convert_loc (loc, type, t1));
10796 /* X ^ (Y | X) -> Y & ~ X*/
10797 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10798 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10800 tree t2 = TREE_OPERAND (arg1, 0);
10801 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10803 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10804 fold_convert_loc (loc, type, t2),
10805 fold_convert_loc (loc, type, t1));
10809 /* Convert ~X ^ ~Y to X ^ Y. */
10810 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10811 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10812 return fold_build2_loc (loc, code, type,
10813 fold_convert_loc (loc, type,
10814 TREE_OPERAND (arg0, 0)),
10815 fold_convert_loc (loc, type,
10816 TREE_OPERAND (arg1, 0)));
10818 /* Convert ~X ^ C to X ^ ~C. */
10819 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10820 && TREE_CODE (arg1) == INTEGER_CST)
10821 return fold_build2_loc (loc, code, type,
10822 fold_convert_loc (loc, type,
10823 TREE_OPERAND (arg0, 0)),
10824 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10826 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10827 if (TREE_CODE (arg0) == BIT_AND_EXPR
10828 && integer_onep (TREE_OPERAND (arg0, 1))
10829 && integer_onep (arg1))
10830 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10831 build_int_cst (TREE_TYPE (arg0), 0));
10833 /* Fold (X & Y) ^ Y as ~X & Y. */
10834 if (TREE_CODE (arg0) == BIT_AND_EXPR
10835 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10837 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10838 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10839 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10840 fold_convert_loc (loc, type, arg1));
10842 /* Fold (X & Y) ^ X as ~Y & X. */
10843 if (TREE_CODE (arg0) == BIT_AND_EXPR
10844 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10845 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10847 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10848 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10849 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10850 fold_convert_loc (loc, type, arg1));
10852 /* Fold X ^ (X & Y) as X & ~Y. */
10853 if (TREE_CODE (arg1) == BIT_AND_EXPR
10854 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10856 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10857 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10858 fold_convert_loc (loc, type, arg0),
10859 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10861 /* Fold X ^ (Y & X) as ~Y & X. */
10862 if (TREE_CODE (arg1) == BIT_AND_EXPR
10863 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10864 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10866 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10867 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10868 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10869 fold_convert_loc (loc, type, arg0));
10872 /* See if this can be simplified into a rotate first. If that
10873 is unsuccessful continue in the association code. */
10877 if (integer_all_onesp (arg1))
10878 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10879 if (integer_zerop (arg1))
10880 return omit_one_operand_loc (loc, type, arg1, arg0);
10881 if (operand_equal_p (arg0, arg1, 0))
10882 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10884 /* ~X & X is always zero. */
10885 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10886 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10887 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10889 /* X & ~X is always zero. */
10890 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10891 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10892 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10894 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10895 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10896 && TREE_CODE (arg1) == INTEGER_CST
10897 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10899 tree tmp1 = fold_convert_loc (loc, type, arg1);
10900 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10901 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10902 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10903 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10905 fold_convert_loc (loc, type,
10906 fold_build2_loc (loc, BIT_IOR_EXPR,
10907 type, tmp2, tmp3));
10910 /* (X | Y) & Y is (X, Y). */
10911 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10912 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10913 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10914 /* (X | Y) & X is (Y, X). */
10915 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10916 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10917 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10918 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10919 /* X & (X | Y) is (Y, X). */
10920 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10921 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10922 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10923 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10924 /* X & (Y | X) is (Y, X). */
10925 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10926 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10927 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10928 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10930 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10931 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10932 && integer_onep (TREE_OPERAND (arg0, 1))
10933 && integer_onep (arg1))
10935 tem = TREE_OPERAND (arg0, 0);
10936 return fold_build2_loc (loc, EQ_EXPR, type,
10937 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10938 build_int_cst (TREE_TYPE (tem), 1)),
10939 build_int_cst (TREE_TYPE (tem), 0));
10941 /* Fold ~X & 1 as (X & 1) == 0. */
10942 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10943 && integer_onep (arg1))
10945 tem = TREE_OPERAND (arg0, 0);
10946 return fold_build2_loc (loc, EQ_EXPR, type,
10947 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10948 build_int_cst (TREE_TYPE (tem), 1)),
10949 build_int_cst (TREE_TYPE (tem), 0));
10952 /* Fold (X ^ Y) & Y as ~X & Y. */
10953 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10954 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10956 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10957 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10958 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10959 fold_convert_loc (loc, type, arg1));
10961 /* Fold (X ^ Y) & X as ~Y & X. */
10962 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10963 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10964 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10966 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10967 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10968 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10969 fold_convert_loc (loc, type, arg1));
10971 /* Fold X & (X ^ Y) as X & ~Y. */
10972 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10973 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10975 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10976 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10977 fold_convert_loc (loc, type, arg0),
10978 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10980 /* Fold X & (Y ^ X) as ~Y & X. */
10981 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10982 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10983 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10985 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10986 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10987 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10988 fold_convert_loc (loc, type, arg0));
10991 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10992 ((A & N) + B) & M -> (A + B) & M
10993 Similarly if (N & M) == 0,
10994 ((A | N) + B) & M -> (A + B) & M
10995 and for - instead of + (or unary - instead of +)
10996 and/or ^ instead of |.
10997 If B is constant and (B & M) == 0, fold into A & M. */
10998 if (host_integerp (arg1, 1))
11000 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11001 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11002 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11003 && (TREE_CODE (arg0) == PLUS_EXPR
11004 || TREE_CODE (arg0) == MINUS_EXPR
11005 || TREE_CODE (arg0) == NEGATE_EXPR)
11006 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11007 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11011 unsigned HOST_WIDE_INT cst0;
11013 /* Now we know that arg0 is (C + D) or (C - D) or
11014 -C and arg1 (M) is == (1LL << cst) - 1.
11015 Store C into PMOP[0] and D into PMOP[1]. */
11016 pmop[0] = TREE_OPERAND (arg0, 0);
11018 if (TREE_CODE (arg0) != NEGATE_EXPR)
11020 pmop[1] = TREE_OPERAND (arg0, 1);
11024 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11025 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11029 for (; which >= 0; which--)
11030 switch (TREE_CODE (pmop[which]))
11035 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11038 /* tree_low_cst not used, because we don't care about
11040 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11042 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11047 else if (cst0 != 0)
11049 /* If C or D is of the form (A & N) where
11050 (N & M) == M, or of the form (A | N) or
11051 (A ^ N) where (N & M) == 0, replace it with A. */
11052 pmop[which] = TREE_OPERAND (pmop[which], 0);
11055 /* If C or D is a N where (N & M) == 0, it can be
11056 omitted (assumed 0). */
11057 if ((TREE_CODE (arg0) == PLUS_EXPR
11058 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11059 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11060 pmop[which] = NULL;
11066 /* Only build anything new if we optimized one or both arguments
11068 if (pmop[0] != TREE_OPERAND (arg0, 0)
11069 || (TREE_CODE (arg0) != NEGATE_EXPR
11070 && pmop[1] != TREE_OPERAND (arg0, 1)))
11072 tree utype = TREE_TYPE (arg0);
11073 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11075 /* Perform the operations in a type that has defined
11076 overflow behavior. */
11077 utype = unsigned_type_for (TREE_TYPE (arg0));
11078 if (pmop[0] != NULL)
11079 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11080 if (pmop[1] != NULL)
11081 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11084 if (TREE_CODE (arg0) == NEGATE_EXPR)
11085 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11086 else if (TREE_CODE (arg0) == PLUS_EXPR)
11088 if (pmop[0] != NULL && pmop[1] != NULL)
11089 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11091 else if (pmop[0] != NULL)
11093 else if (pmop[1] != NULL)
11096 return build_int_cst (type, 0);
11098 else if (pmop[0] == NULL)
11099 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11101 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11103 /* TEM is now the new binary +, - or unary - replacement. */
11104 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11105 fold_convert_loc (loc, utype, arg1));
11106 return fold_convert_loc (loc, type, tem);
11111 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11112 if (t1 != NULL_TREE)
11114 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11115 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11116 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11119 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11121 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11122 && (~TREE_INT_CST_LOW (arg1)
11123 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11125 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11128 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11130 This results in more efficient code for machines without a NOR
11131 instruction. Combine will canonicalize to the first form
11132 which will allow use of NOR instructions provided by the
11133 backend if they exist. */
11134 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11135 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11137 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11138 build2 (BIT_IOR_EXPR, type,
11139 fold_convert_loc (loc, type,
11140 TREE_OPERAND (arg0, 0)),
11141 fold_convert_loc (loc, type,
11142 TREE_OPERAND (arg1, 0))));
11145 /* If arg0 is derived from the address of an object or function, we may
11146 be able to fold this expression using the object or function's
11148 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11150 unsigned HOST_WIDE_INT modulus, residue;
11151 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11153 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11154 integer_onep (arg1));
11156 /* This works because modulus is a power of 2. If this weren't the
11157 case, we'd have to replace it by its greatest power-of-2
11158 divisor: modulus & -modulus. */
11160 return build_int_cst (type, residue & low);
11163 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11164 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11165 if the new mask might be further optimized. */
11166 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11167 || TREE_CODE (arg0) == RSHIFT_EXPR)
11168 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11169 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11170 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11171 < TYPE_PRECISION (TREE_TYPE (arg0))
11172 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11173 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11175 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11176 unsigned HOST_WIDE_INT mask
11177 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11178 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11179 tree shift_type = TREE_TYPE (arg0);
11181 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11182 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11183 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11184 && TYPE_PRECISION (TREE_TYPE (arg0))
11185 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11187 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11188 tree arg00 = TREE_OPERAND (arg0, 0);
11189 /* See if more bits can be proven as zero because of
11191 if (TREE_CODE (arg00) == NOP_EXPR
11192 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11194 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11195 if (TYPE_PRECISION (inner_type)
11196 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11197 && TYPE_PRECISION (inner_type) < prec)
11199 prec = TYPE_PRECISION (inner_type);
11200 /* See if we can shorten the right shift. */
11202 shift_type = inner_type;
11205 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11206 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11207 zerobits <<= prec - shiftc;
11208 /* For arithmetic shift if sign bit could be set, zerobits
11209 can contain actually sign bits, so no transformation is
11210 possible, unless MASK masks them all away. In that
11211 case the shift needs to be converted into logical shift. */
11212 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11213 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11215 if ((mask & zerobits) == 0)
11216 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11222 /* ((X << 16) & 0xff00) is (X, 0). */
11223 if ((mask & zerobits) == mask)
11224 return omit_one_operand_loc (loc, type,
11225 build_int_cst (type, 0), arg0);
11227 newmask = mask | zerobits;
11228 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11232 /* Only do the transformation if NEWMASK is some integer
11234 for (prec = BITS_PER_UNIT;
11235 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11236 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11238 if (prec < HOST_BITS_PER_WIDE_INT
11239 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11243 if (shift_type != TREE_TYPE (arg0))
11245 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11246 fold_convert_loc (loc, shift_type,
11247 TREE_OPERAND (arg0, 0)),
11248 TREE_OPERAND (arg0, 1));
11249 tem = fold_convert_loc (loc, type, tem);
11253 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11254 if (!tree_int_cst_equal (newmaskt, arg1))
11255 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11263 /* Don't touch a floating-point divide by zero unless the mode
11264 of the constant can represent infinity. */
11265 if (TREE_CODE (arg1) == REAL_CST
11266 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11267 && real_zerop (arg1))
11270 /* Optimize A / A to 1.0 if we don't care about
11271 NaNs or Infinities. Skip the transformation
11272 for non-real operands. */
11273 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11274 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11275 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11276 && operand_equal_p (arg0, arg1, 0))
11278 tree r = build_real (TREE_TYPE (arg0), dconst1);
11280 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11283 /* The complex version of the above A / A optimization. */
11284 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11285 && operand_equal_p (arg0, arg1, 0))
11287 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11288 if (! HONOR_NANS (TYPE_MODE (elem_type))
11289 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11291 tree r = build_real (elem_type, dconst1);
11292 /* omit_two_operands will call fold_convert for us. */
11293 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11297 /* (-A) / (-B) -> A / B */
11298 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11299 return fold_build2_loc (loc, RDIV_EXPR, type,
11300 TREE_OPERAND (arg0, 0),
11301 negate_expr (arg1));
11302 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11303 return fold_build2_loc (loc, RDIV_EXPR, type,
11304 negate_expr (arg0),
11305 TREE_OPERAND (arg1, 0));
11307 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11308 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11309 && real_onep (arg1))
11310 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11312 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11313 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11314 && real_minus_onep (arg1))
11315 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11316 negate_expr (arg0)));
11318 /* If ARG1 is a constant, we can convert this to a multiply by the
11319 reciprocal. This does not have the same rounding properties,
11320 so only do this if -freciprocal-math. We can actually
11321 always safely do it if ARG1 is a power of two, but it's hard to
11322 tell if it is or not in a portable manner. */
11323 if (TREE_CODE (arg1) == REAL_CST)
11325 if (flag_reciprocal_math
11326 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11328 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11329 /* Find the reciprocal if optimizing and the result is exact. */
11333 r = TREE_REAL_CST (arg1);
11334 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11336 tem = build_real (type, r);
11337 return fold_build2_loc (loc, MULT_EXPR, type,
11338 fold_convert_loc (loc, type, arg0), tem);
11342 /* Convert A/B/C to A/(B*C). */
11343 if (flag_reciprocal_math
11344 && TREE_CODE (arg0) == RDIV_EXPR)
11345 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11346 fold_build2_loc (loc, MULT_EXPR, type,
11347 TREE_OPERAND (arg0, 1), arg1));
11349 /* Convert A/(B/C) to (A/B)*C. */
11350 if (flag_reciprocal_math
11351 && TREE_CODE (arg1) == RDIV_EXPR)
11352 return fold_build2_loc (loc, MULT_EXPR, type,
11353 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11354 TREE_OPERAND (arg1, 0)),
11355 TREE_OPERAND (arg1, 1));
11357 /* Convert C1/(X*C2) into (C1/C2)/X. */
11358 if (flag_reciprocal_math
11359 && TREE_CODE (arg1) == MULT_EXPR
11360 && TREE_CODE (arg0) == REAL_CST
11361 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11363 tree tem = const_binop (RDIV_EXPR, arg0,
11364 TREE_OPERAND (arg1, 1));
11366 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11367 TREE_OPERAND (arg1, 0));
11370 if (flag_unsafe_math_optimizations)
11372 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11373 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11375 /* Optimize sin(x)/cos(x) as tan(x). */
11376 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11377 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11378 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11379 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11380 CALL_EXPR_ARG (arg1, 0), 0))
11382 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11384 if (tanfn != NULL_TREE)
11385 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11388 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11389 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11390 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11391 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11392 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11393 CALL_EXPR_ARG (arg1, 0), 0))
11395 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11397 if (tanfn != NULL_TREE)
11399 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11400 CALL_EXPR_ARG (arg0, 0));
11401 return fold_build2_loc (loc, RDIV_EXPR, type,
11402 build_real (type, dconst1), tmp);
11406 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11407 NaNs or Infinities. */
11408 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11409 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11410 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11412 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11413 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11415 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11416 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11417 && operand_equal_p (arg00, arg01, 0))
11419 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11421 if (cosfn != NULL_TREE)
11422 return build_call_expr_loc (loc, cosfn, 1, arg00);
11426 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11427 NaNs or Infinities. */
11428 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11429 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11430 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11432 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11433 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11435 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11436 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11437 && operand_equal_p (arg00, arg01, 0))
11439 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11441 if (cosfn != NULL_TREE)
11443 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11444 return fold_build2_loc (loc, RDIV_EXPR, type,
11445 build_real (type, dconst1),
11451 /* Optimize pow(x,c)/x as pow(x,c-1). */
11452 if (fcode0 == BUILT_IN_POW
11453 || fcode0 == BUILT_IN_POWF
11454 || fcode0 == BUILT_IN_POWL)
11456 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11457 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11458 if (TREE_CODE (arg01) == REAL_CST
11459 && !TREE_OVERFLOW (arg01)
11460 && operand_equal_p (arg1, arg00, 0))
11462 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11466 c = TREE_REAL_CST (arg01);
11467 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11468 arg = build_real (type, c);
11469 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11473 /* Optimize a/root(b/c) into a*root(c/b). */
11474 if (BUILTIN_ROOT_P (fcode1))
11476 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11478 if (TREE_CODE (rootarg) == RDIV_EXPR)
11480 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11481 tree b = TREE_OPERAND (rootarg, 0);
11482 tree c = TREE_OPERAND (rootarg, 1);
11484 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11486 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11487 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11491 /* Optimize x/expN(y) into x*expN(-y). */
11492 if (BUILTIN_EXPONENT_P (fcode1))
11494 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11495 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11496 arg1 = build_call_expr_loc (loc,
11498 fold_convert_loc (loc, type, arg));
11499 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11502 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11503 if (fcode1 == BUILT_IN_POW
11504 || fcode1 == BUILT_IN_POWF
11505 || fcode1 == BUILT_IN_POWL)
11507 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11508 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11509 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11510 tree neg11 = fold_convert_loc (loc, type,
11511 negate_expr (arg11));
11512 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11513 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11518 case TRUNC_DIV_EXPR:
11519 /* Optimize (X & (-A)) / A where A is a power of 2,
11521 if (TREE_CODE (arg0) == BIT_AND_EXPR
11522 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11523 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11525 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11526 arg1, TREE_OPERAND (arg0, 1));
11527 if (sum && integer_zerop (sum)) {
11528 unsigned long pow2;
11530 if (TREE_INT_CST_LOW (arg1))
11531 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11533 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11534 + HOST_BITS_PER_WIDE_INT;
11536 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11537 TREE_OPERAND (arg0, 0),
11538 build_int_cst (NULL_TREE, pow2));
11544 case FLOOR_DIV_EXPR:
11545 /* Simplify A / (B << N) where A and B are positive and B is
11546 a power of 2, to A >> (N + log2(B)). */
11547 strict_overflow_p = false;
11548 if (TREE_CODE (arg1) == LSHIFT_EXPR
11549 && (TYPE_UNSIGNED (type)
11550 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11552 tree sval = TREE_OPERAND (arg1, 0);
11553 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11555 tree sh_cnt = TREE_OPERAND (arg1, 1);
11556 unsigned long pow2;
11558 if (TREE_INT_CST_LOW (sval))
11559 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11561 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11562 + HOST_BITS_PER_WIDE_INT;
11564 if (strict_overflow_p)
11565 fold_overflow_warning (("assuming signed overflow does not "
11566 "occur when simplifying A / (B << N)"),
11567 WARN_STRICT_OVERFLOW_MISC);
11569 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11570 sh_cnt, build_int_cst (NULL_TREE, pow2));
11571 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11572 fold_convert_loc (loc, type, arg0), sh_cnt);
11576 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11577 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11578 if (INTEGRAL_TYPE_P (type)
11579 && TYPE_UNSIGNED (type)
11580 && code == FLOOR_DIV_EXPR)
11581 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11585 case ROUND_DIV_EXPR:
11586 case CEIL_DIV_EXPR:
11587 case EXACT_DIV_EXPR:
11588 if (integer_onep (arg1))
11589 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11590 if (integer_zerop (arg1))
11592 /* X / -1 is -X. */
11593 if (!TYPE_UNSIGNED (type)
11594 && TREE_CODE (arg1) == INTEGER_CST
11595 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11596 && TREE_INT_CST_HIGH (arg1) == -1)
11597 return fold_convert_loc (loc, type, negate_expr (arg0));
11599 /* Convert -A / -B to A / B when the type is signed and overflow is
11601 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11602 && TREE_CODE (arg0) == NEGATE_EXPR
11603 && negate_expr_p (arg1))
11605 if (INTEGRAL_TYPE_P (type))
11606 fold_overflow_warning (("assuming signed overflow does not occur "
11607 "when distributing negation across "
11609 WARN_STRICT_OVERFLOW_MISC);
11610 return fold_build2_loc (loc, code, type,
11611 fold_convert_loc (loc, type,
11612 TREE_OPERAND (arg0, 0)),
11613 fold_convert_loc (loc, type,
11614 negate_expr (arg1)));
11616 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11617 && TREE_CODE (arg1) == NEGATE_EXPR
11618 && negate_expr_p (arg0))
11620 if (INTEGRAL_TYPE_P (type))
11621 fold_overflow_warning (("assuming signed overflow does not occur "
11622 "when distributing negation across "
11624 WARN_STRICT_OVERFLOW_MISC);
11625 return fold_build2_loc (loc, code, type,
11626 fold_convert_loc (loc, type,
11627 negate_expr (arg0)),
11628 fold_convert_loc (loc, type,
11629 TREE_OPERAND (arg1, 0)));
11632 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11633 operation, EXACT_DIV_EXPR.
11635 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11636 At one time others generated faster code, it's not clear if they do
11637 after the last round to changes to the DIV code in expmed.c. */
11638 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11639 && multiple_of_p (type, arg0, arg1))
11640 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11642 strict_overflow_p = false;
11643 if (TREE_CODE (arg1) == INTEGER_CST
11644 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11645 &strict_overflow_p)))
11647 if (strict_overflow_p)
11648 fold_overflow_warning (("assuming signed overflow does not occur "
11649 "when simplifying division"),
11650 WARN_STRICT_OVERFLOW_MISC);
11651 return fold_convert_loc (loc, type, tem);
11656 case CEIL_MOD_EXPR:
11657 case FLOOR_MOD_EXPR:
11658 case ROUND_MOD_EXPR:
11659 case TRUNC_MOD_EXPR:
11660 /* X % 1 is always zero, but be sure to preserve any side
11662 if (integer_onep (arg1))
11663 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11665 /* X % 0, return X % 0 unchanged so that we can get the
11666 proper warnings and errors. */
11667 if (integer_zerop (arg1))
11670 /* 0 % X is always zero, but be sure to preserve any side
11671 effects in X. Place this after checking for X == 0. */
11672 if (integer_zerop (arg0))
11673 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11675 /* X % -1 is zero. */
11676 if (!TYPE_UNSIGNED (type)
11677 && TREE_CODE (arg1) == INTEGER_CST
11678 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11679 && TREE_INT_CST_HIGH (arg1) == -1)
11680 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11682 /* X % -C is the same as X % C. */
11683 if (code == TRUNC_MOD_EXPR
11684 && !TYPE_UNSIGNED (type)
11685 && TREE_CODE (arg1) == INTEGER_CST
11686 && !TREE_OVERFLOW (arg1)
11687 && TREE_INT_CST_HIGH (arg1) < 0
11688 && !TYPE_OVERFLOW_TRAPS (type)
11689 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11690 && !sign_bit_p (arg1, arg1))
11691 return fold_build2_loc (loc, code, type,
11692 fold_convert_loc (loc, type, arg0),
11693 fold_convert_loc (loc, type,
11694 negate_expr (arg1)));
11696 /* X % -Y is the same as X % Y. */
11697 if (code == TRUNC_MOD_EXPR
11698 && !TYPE_UNSIGNED (type)
11699 && TREE_CODE (arg1) == NEGATE_EXPR
11700 && !TYPE_OVERFLOW_TRAPS (type))
11701 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11702 fold_convert_loc (loc, type,
11703 TREE_OPERAND (arg1, 0)));
11705 strict_overflow_p = false;
11706 if (TREE_CODE (arg1) == INTEGER_CST
11707 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11708 &strict_overflow_p)))
11710 if (strict_overflow_p)
11711 fold_overflow_warning (("assuming signed overflow does not occur "
11712 "when simplifying modulus"),
11713 WARN_STRICT_OVERFLOW_MISC);
11714 return fold_convert_loc (loc, type, tem);
11717 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11718 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11719 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11720 && (TYPE_UNSIGNED (type)
11721 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11724 /* Also optimize A % (C << N) where C is a power of 2,
11725 to A & ((C << N) - 1). */
11726 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11727 c = TREE_OPERAND (arg1, 0);
11729 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11732 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11733 build_int_cst (TREE_TYPE (arg1), 1));
11734 if (strict_overflow_p)
11735 fold_overflow_warning (("assuming signed overflow does not "
11736 "occur when simplifying "
11737 "X % (power of two)"),
11738 WARN_STRICT_OVERFLOW_MISC);
11739 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11740 fold_convert_loc (loc, type, arg0),
11741 fold_convert_loc (loc, type, mask));
11749 if (integer_all_onesp (arg0))
11750 return omit_one_operand_loc (loc, type, arg0, arg1);
11754 /* Optimize -1 >> x for arithmetic right shifts. */
11755 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11756 && tree_expr_nonnegative_p (arg1))
11757 return omit_one_operand_loc (loc, type, arg0, arg1);
11758 /* ... fall through ... */
11762 if (integer_zerop (arg1))
11763 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11764 if (integer_zerop (arg0))
11765 return omit_one_operand_loc (loc, type, arg0, arg1);
11767 /* Since negative shift count is not well-defined,
11768 don't try to compute it in the compiler. */
11769 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11772 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11773 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11774 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11775 && host_integerp (TREE_OPERAND (arg0, 1), false)
11776 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11778 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11779 + TREE_INT_CST_LOW (arg1));
11781 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11782 being well defined. */
11783 if (low >= TYPE_PRECISION (type))
11785 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11786 low = low % TYPE_PRECISION (type);
11787 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11788 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11789 TREE_OPERAND (arg0, 0));
11791 low = TYPE_PRECISION (type) - 1;
11794 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11795 build_int_cst (type, low));
11798 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11799 into x & ((unsigned)-1 >> c) for unsigned types. */
11800 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11801 || (TYPE_UNSIGNED (type)
11802 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11803 && host_integerp (arg1, false)
11804 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11805 && host_integerp (TREE_OPERAND (arg0, 1), false)
11806 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11808 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11809 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11815 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11817 lshift = build_int_cst (type, -1);
11818 lshift = int_const_binop (code, lshift, arg1, 0);
11820 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11824 /* Rewrite an LROTATE_EXPR by a constant into an
11825 RROTATE_EXPR by a new constant. */
11826 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11828 tree tem = build_int_cst (TREE_TYPE (arg1),
11829 TYPE_PRECISION (type));
11830 tem = const_binop (MINUS_EXPR, tem, arg1);
11831 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
11834 /* If we have a rotate of a bit operation with the rotate count and
11835 the second operand of the bit operation both constant,
11836 permute the two operations. */
11837 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11838 && (TREE_CODE (arg0) == BIT_AND_EXPR
11839 || TREE_CODE (arg0) == BIT_IOR_EXPR
11840 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11841 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11842 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11843 fold_build2_loc (loc, code, type,
11844 TREE_OPERAND (arg0, 0), arg1),
11845 fold_build2_loc (loc, code, type,
11846 TREE_OPERAND (arg0, 1), arg1));
11848 /* Two consecutive rotates adding up to the precision of the
11849 type can be ignored. */
11850 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11851 && TREE_CODE (arg0) == RROTATE_EXPR
11852 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11853 && TREE_INT_CST_HIGH (arg1) == 0
11854 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11855 && ((TREE_INT_CST_LOW (arg1)
11856 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11857 == (unsigned int) TYPE_PRECISION (type)))
11858 return TREE_OPERAND (arg0, 0);
11860 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11861 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11862 if the latter can be further optimized. */
11863 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11864 && TREE_CODE (arg0) == BIT_AND_EXPR
11865 && TREE_CODE (arg1) == INTEGER_CST
11866 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11868 tree mask = fold_build2_loc (loc, code, type,
11869 fold_convert_loc (loc, type,
11870 TREE_OPERAND (arg0, 1)),
11872 tree shift = fold_build2_loc (loc, code, type,
11873 fold_convert_loc (loc, type,
11874 TREE_OPERAND (arg0, 0)),
11876 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11884 if (operand_equal_p (arg0, arg1, 0))
11885 return omit_one_operand_loc (loc, type, arg0, arg1);
11886 if (INTEGRAL_TYPE_P (type)
11887 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11888 return omit_one_operand_loc (loc, type, arg1, arg0);
11889 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11895 if (operand_equal_p (arg0, arg1, 0))
11896 return omit_one_operand_loc (loc, type, arg0, arg1);
11897 if (INTEGRAL_TYPE_P (type)
11898 && TYPE_MAX_VALUE (type)
11899 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11900 return omit_one_operand_loc (loc, type, arg1, arg0);
11901 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11906 case TRUTH_ANDIF_EXPR:
11907 /* Note that the operands of this must be ints
11908 and their values must be 0 or 1.
11909 ("true" is a fixed value perhaps depending on the language.) */
11910 /* If first arg is constant zero, return it. */
11911 if (integer_zerop (arg0))
11912 return fold_convert_loc (loc, type, arg0);
11913 case TRUTH_AND_EXPR:
11914 /* If either arg is constant true, drop it. */
11915 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11916 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11917 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11918 /* Preserve sequence points. */
11919 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11920 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11921 /* If second arg is constant zero, result is zero, but first arg
11922 must be evaluated. */
11923 if (integer_zerop (arg1))
11924 return omit_one_operand_loc (loc, type, arg1, arg0);
11925 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11926 case will be handled here. */
11927 if (integer_zerop (arg0))
11928 return omit_one_operand_loc (loc, type, arg0, arg1);
11930 /* !X && X is always false. */
11931 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11932 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11933 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11934 /* X && !X is always false. */
11935 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11936 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11937 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11939 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11940 means A >= Y && A != MAX, but in this case we know that
11943 if (!TREE_SIDE_EFFECTS (arg0)
11944 && !TREE_SIDE_EFFECTS (arg1))
11946 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11947 if (tem && !operand_equal_p (tem, arg0, 0))
11948 return fold_build2_loc (loc, code, type, tem, arg1);
11950 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11951 if (tem && !operand_equal_p (tem, arg1, 0))
11952 return fold_build2_loc (loc, code, type, arg0, tem);
11956 /* We only do these simplifications if we are optimizing. */
11960 /* Check for things like (A || B) && (A || C). We can convert this
11961 to A || (B && C). Note that either operator can be any of the four
11962 truth and/or operations and the transformation will still be
11963 valid. Also note that we only care about order for the
11964 ANDIF and ORIF operators. If B contains side effects, this
11965 might change the truth-value of A. */
11966 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11967 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11968 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11969 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11970 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11971 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11973 tree a00 = TREE_OPERAND (arg0, 0);
11974 tree a01 = TREE_OPERAND (arg0, 1);
11975 tree a10 = TREE_OPERAND (arg1, 0);
11976 tree a11 = TREE_OPERAND (arg1, 1);
11977 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11978 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11979 && (code == TRUTH_AND_EXPR
11980 || code == TRUTH_OR_EXPR));
11982 if (operand_equal_p (a00, a10, 0))
11983 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11984 fold_build2_loc (loc, code, type, a01, a11));
11985 else if (commutative && operand_equal_p (a00, a11, 0))
11986 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11987 fold_build2_loc (loc, code, type, a01, a10));
11988 else if (commutative && operand_equal_p (a01, a10, 0))
11989 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
11990 fold_build2_loc (loc, code, type, a00, a11));
11992 /* This case if tricky because we must either have commutative
11993 operators or else A10 must not have side-effects. */
11995 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11996 && operand_equal_p (a01, a11, 0))
11997 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11998 fold_build2_loc (loc, code, type, a00, a10),
12002 /* See if we can build a range comparison. */
12003 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
12006 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
12007 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
12009 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
12011 return fold_build2_loc (loc, code, type, tem, arg1);
12014 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
12015 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
12017 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
12019 return fold_build2_loc (loc, code, type, arg0, tem);
12022 /* Check for the possibility of merging component references. If our
12023 lhs is another similar operation, try to merge its rhs with our
12024 rhs. Then try to merge our lhs and rhs. */
12025 if (TREE_CODE (arg0) == code
12026 && 0 != (tem = fold_truthop (loc, code, type,
12027 TREE_OPERAND (arg0, 1), arg1)))
12028 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12030 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12035 case TRUTH_ORIF_EXPR:
12036 /* Note that the operands of this must be ints
12037 and their values must be 0 or true.
12038 ("true" is a fixed value perhaps depending on the language.) */
12039 /* If first arg is constant true, return it. */
12040 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12041 return fold_convert_loc (loc, type, arg0);
12042 case TRUTH_OR_EXPR:
12043 /* If either arg is constant zero, drop it. */
12044 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12045 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12046 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12047 /* Preserve sequence points. */
12048 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12049 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12050 /* If second arg is constant true, result is true, but we must
12051 evaluate first arg. */
12052 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12053 return omit_one_operand_loc (loc, type, arg1, arg0);
12054 /* Likewise for first arg, but note this only occurs here for
12056 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12057 return omit_one_operand_loc (loc, type, arg0, arg1);
12059 /* !X || X is always true. */
12060 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12061 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12062 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12063 /* X || !X is always true. */
12064 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12065 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12066 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12070 case TRUTH_XOR_EXPR:
12071 /* If the second arg is constant zero, drop it. */
12072 if (integer_zerop (arg1))
12073 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12074 /* If the second arg is constant true, this is a logical inversion. */
12075 if (integer_onep (arg1))
12077 /* Only call invert_truthvalue if operand is a truth value. */
12078 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12079 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12081 tem = invert_truthvalue_loc (loc, arg0);
12082 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12084 /* Identical arguments cancel to zero. */
12085 if (operand_equal_p (arg0, arg1, 0))
12086 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12088 /* !X ^ X is always true. */
12089 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12090 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12091 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12093 /* X ^ !X is always true. */
12094 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12095 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12096 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12102 tem = fold_comparison (loc, code, type, op0, op1);
12103 if (tem != NULL_TREE)
12106 /* bool_var != 0 becomes bool_var. */
12107 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12108 && code == NE_EXPR)
12109 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12111 /* bool_var == 1 becomes bool_var. */
12112 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12113 && code == EQ_EXPR)
12114 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12116 /* bool_var != 1 becomes !bool_var. */
12117 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12118 && code == NE_EXPR)
12119 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12120 fold_convert_loc (loc, type, arg0));
12122 /* bool_var == 0 becomes !bool_var. */
12123 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12124 && code == EQ_EXPR)
12125 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12126 fold_convert_loc (loc, type, arg0));
12128 /* !exp != 0 becomes !exp */
12129 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12130 && code == NE_EXPR)
12131 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12133 /* If this is an equality comparison of the address of two non-weak,
12134 unaliased symbols neither of which are extern (since we do not
12135 have access to attributes for externs), then we know the result. */
12136 if (TREE_CODE (arg0) == ADDR_EXPR
12137 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12138 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12139 && ! lookup_attribute ("alias",
12140 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12141 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12142 && TREE_CODE (arg1) == ADDR_EXPR
12143 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12144 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12145 && ! lookup_attribute ("alias",
12146 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12147 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12149 /* We know that we're looking at the address of two
12150 non-weak, unaliased, static _DECL nodes.
12152 It is both wasteful and incorrect to call operand_equal_p
12153 to compare the two ADDR_EXPR nodes. It is wasteful in that
12154 all we need to do is test pointer equality for the arguments
12155 to the two ADDR_EXPR nodes. It is incorrect to use
12156 operand_equal_p as that function is NOT equivalent to a
12157 C equality test. It can in fact return false for two
12158 objects which would test as equal using the C equality
12160 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12161 return constant_boolean_node (equal
12162 ? code == EQ_EXPR : code != EQ_EXPR,
12166 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12167 a MINUS_EXPR of a constant, we can convert it into a comparison with
12168 a revised constant as long as no overflow occurs. */
12169 if (TREE_CODE (arg1) == INTEGER_CST
12170 && (TREE_CODE (arg0) == PLUS_EXPR
12171 || TREE_CODE (arg0) == MINUS_EXPR)
12172 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12173 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12174 ? MINUS_EXPR : PLUS_EXPR,
12175 fold_convert_loc (loc, TREE_TYPE (arg0),
12177 TREE_OPERAND (arg0, 1)))
12178 && !TREE_OVERFLOW (tem))
12179 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12181 /* Similarly for a NEGATE_EXPR. */
12182 if (TREE_CODE (arg0) == NEGATE_EXPR
12183 && TREE_CODE (arg1) == INTEGER_CST
12184 && 0 != (tem = negate_expr (arg1))
12185 && TREE_CODE (tem) == INTEGER_CST
12186 && !TREE_OVERFLOW (tem))
12187 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12189 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12190 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12191 && TREE_CODE (arg1) == INTEGER_CST
12192 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12193 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12194 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12195 fold_convert_loc (loc,
12198 TREE_OPERAND (arg0, 1)));
12200 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12201 if ((TREE_CODE (arg0) == PLUS_EXPR
12202 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12203 || TREE_CODE (arg0) == MINUS_EXPR)
12204 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12205 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12206 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12208 tree val = TREE_OPERAND (arg0, 1);
12209 return omit_two_operands_loc (loc, type,
12210 fold_build2_loc (loc, code, type,
12212 build_int_cst (TREE_TYPE (val),
12214 TREE_OPERAND (arg0, 0), arg1);
12217 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12218 if (TREE_CODE (arg0) == MINUS_EXPR
12219 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12220 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12221 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12223 return omit_two_operands_loc (loc, type,
12225 ? boolean_true_node : boolean_false_node,
12226 TREE_OPERAND (arg0, 1), arg1);
12229 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12230 for !=. Don't do this for ordered comparisons due to overflow. */
12231 if (TREE_CODE (arg0) == MINUS_EXPR
12232 && integer_zerop (arg1))
12233 return fold_build2_loc (loc, code, type,
12234 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12236 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12237 if (TREE_CODE (arg0) == ABS_EXPR
12238 && (integer_zerop (arg1) || real_zerop (arg1)))
12239 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12241 /* If this is an EQ or NE comparison with zero and ARG0 is
12242 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12243 two operations, but the latter can be done in one less insn
12244 on machines that have only two-operand insns or on which a
12245 constant cannot be the first operand. */
12246 if (TREE_CODE (arg0) == BIT_AND_EXPR
12247 && integer_zerop (arg1))
12249 tree arg00 = TREE_OPERAND (arg0, 0);
12250 tree arg01 = TREE_OPERAND (arg0, 1);
12251 if (TREE_CODE (arg00) == LSHIFT_EXPR
12252 && integer_onep (TREE_OPERAND (arg00, 0)))
12254 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12255 arg01, TREE_OPERAND (arg00, 1));
12256 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12257 build_int_cst (TREE_TYPE (arg0), 1));
12258 return fold_build2_loc (loc, code, type,
12259 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12262 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12263 && integer_onep (TREE_OPERAND (arg01, 0)))
12265 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12266 arg00, TREE_OPERAND (arg01, 1));
12267 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12268 build_int_cst (TREE_TYPE (arg0), 1));
12269 return fold_build2_loc (loc, code, type,
12270 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12275 /* If this is an NE or EQ comparison of zero against the result of a
12276 signed MOD operation whose second operand is a power of 2, make
12277 the MOD operation unsigned since it is simpler and equivalent. */
12278 if (integer_zerop (arg1)
12279 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12280 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12281 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12282 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12283 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12284 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12286 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12287 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12288 fold_convert_loc (loc, newtype,
12289 TREE_OPERAND (arg0, 0)),
12290 fold_convert_loc (loc, newtype,
12291 TREE_OPERAND (arg0, 1)));
12293 return fold_build2_loc (loc, code, type, newmod,
12294 fold_convert_loc (loc, newtype, arg1));
12297 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12298 C1 is a valid shift constant, and C2 is a power of two, i.e.
12300 if (TREE_CODE (arg0) == BIT_AND_EXPR
12301 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12302 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12304 && integer_pow2p (TREE_OPERAND (arg0, 1))
12305 && integer_zerop (arg1))
12307 tree itype = TREE_TYPE (arg0);
12308 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12309 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12311 /* Check for a valid shift count. */
12312 if (TREE_INT_CST_HIGH (arg001) == 0
12313 && TREE_INT_CST_LOW (arg001) < prec)
12315 tree arg01 = TREE_OPERAND (arg0, 1);
12316 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12317 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12318 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12319 can be rewritten as (X & (C2 << C1)) != 0. */
12320 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12322 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12323 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12324 return fold_build2_loc (loc, code, type, tem, arg1);
12326 /* Otherwise, for signed (arithmetic) shifts,
12327 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12328 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12329 else if (!TYPE_UNSIGNED (itype))
12330 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12331 arg000, build_int_cst (itype, 0));
12332 /* Otherwise, of unsigned (logical) shifts,
12333 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12334 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12336 return omit_one_operand_loc (loc, type,
12337 code == EQ_EXPR ? integer_one_node
12338 : integer_zero_node,
12343 /* If this is an NE comparison of zero with an AND of one, remove the
12344 comparison since the AND will give the correct value. */
12345 if (code == NE_EXPR
12346 && integer_zerop (arg1)
12347 && TREE_CODE (arg0) == BIT_AND_EXPR
12348 && integer_onep (TREE_OPERAND (arg0, 1)))
12349 return fold_convert_loc (loc, type, arg0);
12351 /* If we have (A & C) == C where C is a power of 2, convert this into
12352 (A & C) != 0. Similarly for NE_EXPR. */
12353 if (TREE_CODE (arg0) == BIT_AND_EXPR
12354 && integer_pow2p (TREE_OPERAND (arg0, 1))
12355 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12356 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12357 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12358 integer_zero_node));
12360 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12361 bit, then fold the expression into A < 0 or A >= 0. */
12362 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12366 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12367 Similarly for NE_EXPR. */
12368 if (TREE_CODE (arg0) == BIT_AND_EXPR
12369 && TREE_CODE (arg1) == INTEGER_CST
12370 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12372 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12373 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12374 TREE_OPERAND (arg0, 1));
12375 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12377 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12378 if (integer_nonzerop (dandnotc))
12379 return omit_one_operand_loc (loc, type, rslt, arg0);
12382 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12383 Similarly for NE_EXPR. */
12384 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12385 && TREE_CODE (arg1) == INTEGER_CST
12386 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12388 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12389 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12390 TREE_OPERAND (arg0, 1), notd);
12391 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12392 if (integer_nonzerop (candnotd))
12393 return omit_one_operand_loc (loc, type, rslt, arg0);
12396 /* If this is a comparison of a field, we may be able to simplify it. */
12397 if ((TREE_CODE (arg0) == COMPONENT_REF
12398 || TREE_CODE (arg0) == BIT_FIELD_REF)
12399 /* Handle the constant case even without -O
12400 to make sure the warnings are given. */
12401 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12403 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12408 /* Optimize comparisons of strlen vs zero to a compare of the
12409 first character of the string vs zero. To wit,
12410 strlen(ptr) == 0 => *ptr == 0
12411 strlen(ptr) != 0 => *ptr != 0
12412 Other cases should reduce to one of these two (or a constant)
12413 due to the return value of strlen being unsigned. */
12414 if (TREE_CODE (arg0) == CALL_EXPR
12415 && integer_zerop (arg1))
12417 tree fndecl = get_callee_fndecl (arg0);
12420 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12421 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12422 && call_expr_nargs (arg0) == 1
12423 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12425 tree iref = build_fold_indirect_ref_loc (loc,
12426 CALL_EXPR_ARG (arg0, 0));
12427 return fold_build2_loc (loc, code, type, iref,
12428 build_int_cst (TREE_TYPE (iref), 0));
12432 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12433 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12434 if (TREE_CODE (arg0) == RSHIFT_EXPR
12435 && integer_zerop (arg1)
12436 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12438 tree arg00 = TREE_OPERAND (arg0, 0);
12439 tree arg01 = TREE_OPERAND (arg0, 1);
12440 tree itype = TREE_TYPE (arg00);
12441 if (TREE_INT_CST_HIGH (arg01) == 0
12442 && TREE_INT_CST_LOW (arg01)
12443 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12445 if (TYPE_UNSIGNED (itype))
12447 itype = signed_type_for (itype);
12448 arg00 = fold_convert_loc (loc, itype, arg00);
12450 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12451 type, arg00, build_int_cst (itype, 0));
12455 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12456 if (integer_zerop (arg1)
12457 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12458 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12459 TREE_OPERAND (arg0, 1));
12461 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12462 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12463 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12464 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12465 build_int_cst (TREE_TYPE (arg1), 0));
12466 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12467 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12468 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12469 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12470 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12471 build_int_cst (TREE_TYPE (arg1), 0));
12473 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12474 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12475 && TREE_CODE (arg1) == INTEGER_CST
12476 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12477 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12478 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12479 TREE_OPERAND (arg0, 1), arg1));
12481 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12482 (X & C) == 0 when C is a single bit. */
12483 if (TREE_CODE (arg0) == BIT_AND_EXPR
12484 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12485 && integer_zerop (arg1)
12486 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12488 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12489 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12490 TREE_OPERAND (arg0, 1));
12491 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12495 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12496 constant C is a power of two, i.e. a single bit. */
12497 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12498 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12499 && integer_zerop (arg1)
12500 && integer_pow2p (TREE_OPERAND (arg0, 1))
12501 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12502 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12504 tree arg00 = TREE_OPERAND (arg0, 0);
12505 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12506 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12509 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12510 when is C is a power of two, i.e. a single bit. */
12511 if (TREE_CODE (arg0) == BIT_AND_EXPR
12512 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12513 && integer_zerop (arg1)
12514 && integer_pow2p (TREE_OPERAND (arg0, 1))
12515 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12516 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12518 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12519 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12520 arg000, TREE_OPERAND (arg0, 1));
12521 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12522 tem, build_int_cst (TREE_TYPE (tem), 0));
12525 if (integer_zerop (arg1)
12526 && tree_expr_nonzero_p (arg0))
12528 tree res = constant_boolean_node (code==NE_EXPR, type);
12529 return omit_one_operand_loc (loc, type, res, arg0);
12532 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12533 if (TREE_CODE (arg0) == NEGATE_EXPR
12534 && TREE_CODE (arg1) == NEGATE_EXPR)
12535 return fold_build2_loc (loc, code, type,
12536 TREE_OPERAND (arg0, 0),
12537 TREE_OPERAND (arg1, 0));
12539 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12540 if (TREE_CODE (arg0) == BIT_AND_EXPR
12541 && TREE_CODE (arg1) == BIT_AND_EXPR)
12543 tree arg00 = TREE_OPERAND (arg0, 0);
12544 tree arg01 = TREE_OPERAND (arg0, 1);
12545 tree arg10 = TREE_OPERAND (arg1, 0);
12546 tree arg11 = TREE_OPERAND (arg1, 1);
12547 tree itype = TREE_TYPE (arg0);
12549 if (operand_equal_p (arg01, arg11, 0))
12550 return fold_build2_loc (loc, code, type,
12551 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12552 fold_build2_loc (loc,
12553 BIT_XOR_EXPR, itype,
12556 build_int_cst (itype, 0));
12558 if (operand_equal_p (arg01, arg10, 0))
12559 return fold_build2_loc (loc, code, type,
12560 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12561 fold_build2_loc (loc,
12562 BIT_XOR_EXPR, itype,
12565 build_int_cst (itype, 0));
12567 if (operand_equal_p (arg00, arg11, 0))
12568 return fold_build2_loc (loc, code, type,
12569 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12570 fold_build2_loc (loc,
12571 BIT_XOR_EXPR, itype,
12574 build_int_cst (itype, 0));
12576 if (operand_equal_p (arg00, arg10, 0))
12577 return fold_build2_loc (loc, code, type,
12578 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12579 fold_build2_loc (loc,
12580 BIT_XOR_EXPR, itype,
12583 build_int_cst (itype, 0));
12586 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12587 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12589 tree arg00 = TREE_OPERAND (arg0, 0);
12590 tree arg01 = TREE_OPERAND (arg0, 1);
12591 tree arg10 = TREE_OPERAND (arg1, 0);
12592 tree arg11 = TREE_OPERAND (arg1, 1);
12593 tree itype = TREE_TYPE (arg0);
12595 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12596 operand_equal_p guarantees no side-effects so we don't need
12597 to use omit_one_operand on Z. */
12598 if (operand_equal_p (arg01, arg11, 0))
12599 return fold_build2_loc (loc, code, type, arg00, arg10);
12600 if (operand_equal_p (arg01, arg10, 0))
12601 return fold_build2_loc (loc, code, type, arg00, arg11);
12602 if (operand_equal_p (arg00, arg11, 0))
12603 return fold_build2_loc (loc, code, type, arg01, arg10);
12604 if (operand_equal_p (arg00, arg10, 0))
12605 return fold_build2_loc (loc, code, type, arg01, arg11);
12607 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12608 if (TREE_CODE (arg01) == INTEGER_CST
12609 && TREE_CODE (arg11) == INTEGER_CST)
12610 return fold_build2_loc (loc, code, type,
12611 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
12612 fold_build2_loc (loc,
12613 BIT_XOR_EXPR, itype,
12618 /* Attempt to simplify equality/inequality comparisons of complex
12619 values. Only lower the comparison if the result is known or
12620 can be simplified to a single scalar comparison. */
12621 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12622 || TREE_CODE (arg0) == COMPLEX_CST)
12623 && (TREE_CODE (arg1) == COMPLEX_EXPR
12624 || TREE_CODE (arg1) == COMPLEX_CST))
12626 tree real0, imag0, real1, imag1;
12629 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12631 real0 = TREE_OPERAND (arg0, 0);
12632 imag0 = TREE_OPERAND (arg0, 1);
12636 real0 = TREE_REALPART (arg0);
12637 imag0 = TREE_IMAGPART (arg0);
12640 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12642 real1 = TREE_OPERAND (arg1, 0);
12643 imag1 = TREE_OPERAND (arg1, 1);
12647 real1 = TREE_REALPART (arg1);
12648 imag1 = TREE_IMAGPART (arg1);
12651 rcond = fold_binary_loc (loc, code, type, real0, real1);
12652 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12654 if (integer_zerop (rcond))
12656 if (code == EQ_EXPR)
12657 return omit_two_operands_loc (loc, type, boolean_false_node,
12659 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12663 if (code == NE_EXPR)
12664 return omit_two_operands_loc (loc, type, boolean_true_node,
12666 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12670 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12671 if (icond && TREE_CODE (icond) == INTEGER_CST)
12673 if (integer_zerop (icond))
12675 if (code == EQ_EXPR)
12676 return omit_two_operands_loc (loc, type, boolean_false_node,
12678 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12682 if (code == NE_EXPR)
12683 return omit_two_operands_loc (loc, type, boolean_true_node,
12685 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12696 tem = fold_comparison (loc, code, type, op0, op1);
12697 if (tem != NULL_TREE)
12700 /* Transform comparisons of the form X +- C CMP X. */
12701 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12702 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12703 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12704 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12705 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12706 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12708 tree arg01 = TREE_OPERAND (arg0, 1);
12709 enum tree_code code0 = TREE_CODE (arg0);
12712 if (TREE_CODE (arg01) == REAL_CST)
12713 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12715 is_positive = tree_int_cst_sgn (arg01);
12717 /* (X - c) > X becomes false. */
12718 if (code == GT_EXPR
12719 && ((code0 == MINUS_EXPR && is_positive >= 0)
12720 || (code0 == PLUS_EXPR && is_positive <= 0)))
12722 if (TREE_CODE (arg01) == INTEGER_CST
12723 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12724 fold_overflow_warning (("assuming signed overflow does not "
12725 "occur when assuming that (X - c) > X "
12726 "is always false"),
12727 WARN_STRICT_OVERFLOW_ALL);
12728 return constant_boolean_node (0, type);
12731 /* Likewise (X + c) < X becomes false. */
12732 if (code == LT_EXPR
12733 && ((code0 == PLUS_EXPR && is_positive >= 0)
12734 || (code0 == MINUS_EXPR && is_positive <= 0)))
12736 if (TREE_CODE (arg01) == INTEGER_CST
12737 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12738 fold_overflow_warning (("assuming signed overflow does not "
12739 "occur when assuming that "
12740 "(X + c) < X is always false"),
12741 WARN_STRICT_OVERFLOW_ALL);
12742 return constant_boolean_node (0, type);
12745 /* Convert (X - c) <= X to true. */
12746 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12748 && ((code0 == MINUS_EXPR && is_positive >= 0)
12749 || (code0 == PLUS_EXPR && is_positive <= 0)))
12751 if (TREE_CODE (arg01) == INTEGER_CST
12752 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12753 fold_overflow_warning (("assuming signed overflow does not "
12754 "occur when assuming that "
12755 "(X - c) <= X is always true"),
12756 WARN_STRICT_OVERFLOW_ALL);
12757 return constant_boolean_node (1, type);
12760 /* Convert (X + c) >= X to true. */
12761 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12763 && ((code0 == PLUS_EXPR && is_positive >= 0)
12764 || (code0 == MINUS_EXPR && is_positive <= 0)))
12766 if (TREE_CODE (arg01) == INTEGER_CST
12767 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12768 fold_overflow_warning (("assuming signed overflow does not "
12769 "occur when assuming that "
12770 "(X + c) >= X is always true"),
12771 WARN_STRICT_OVERFLOW_ALL);
12772 return constant_boolean_node (1, type);
12775 if (TREE_CODE (arg01) == INTEGER_CST)
12777 /* Convert X + c > X and X - c < X to true for integers. */
12778 if (code == GT_EXPR
12779 && ((code0 == PLUS_EXPR && is_positive > 0)
12780 || (code0 == MINUS_EXPR && is_positive < 0)))
12782 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12783 fold_overflow_warning (("assuming signed overflow does "
12784 "not occur when assuming that "
12785 "(X + c) > X is always true"),
12786 WARN_STRICT_OVERFLOW_ALL);
12787 return constant_boolean_node (1, type);
12790 if (code == LT_EXPR
12791 && ((code0 == MINUS_EXPR && is_positive > 0)
12792 || (code0 == PLUS_EXPR && is_positive < 0)))
12794 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12795 fold_overflow_warning (("assuming signed overflow does "
12796 "not occur when assuming that "
12797 "(X - c) < X is always true"),
12798 WARN_STRICT_OVERFLOW_ALL);
12799 return constant_boolean_node (1, type);
12802 /* Convert X + c <= X and X - c >= X to false for integers. */
12803 if (code == LE_EXPR
12804 && ((code0 == PLUS_EXPR && is_positive > 0)
12805 || (code0 == MINUS_EXPR && is_positive < 0)))
12807 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12808 fold_overflow_warning (("assuming signed overflow does "
12809 "not occur when assuming that "
12810 "(X + c) <= X is always false"),
12811 WARN_STRICT_OVERFLOW_ALL);
12812 return constant_boolean_node (0, type);
12815 if (code == GE_EXPR
12816 && ((code0 == MINUS_EXPR && is_positive > 0)
12817 || (code0 == PLUS_EXPR && is_positive < 0)))
12819 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12820 fold_overflow_warning (("assuming signed overflow does "
12821 "not occur when assuming that "
12822 "(X - c) >= X is always false"),
12823 WARN_STRICT_OVERFLOW_ALL);
12824 return constant_boolean_node (0, type);
12829 /* Comparisons with the highest or lowest possible integer of
12830 the specified precision will have known values. */
12832 tree arg1_type = TREE_TYPE (arg1);
12833 unsigned int width = TYPE_PRECISION (arg1_type);
12835 if (TREE_CODE (arg1) == INTEGER_CST
12836 && width <= 2 * HOST_BITS_PER_WIDE_INT
12837 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12839 HOST_WIDE_INT signed_max_hi;
12840 unsigned HOST_WIDE_INT signed_max_lo;
12841 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12843 if (width <= HOST_BITS_PER_WIDE_INT)
12845 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12850 if (TYPE_UNSIGNED (arg1_type))
12852 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12858 max_lo = signed_max_lo;
12859 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12865 width -= HOST_BITS_PER_WIDE_INT;
12866 signed_max_lo = -1;
12867 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12872 if (TYPE_UNSIGNED (arg1_type))
12874 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12879 max_hi = signed_max_hi;
12880 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12884 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12885 && TREE_INT_CST_LOW (arg1) == max_lo)
12889 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12892 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12895 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12898 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12900 /* The GE_EXPR and LT_EXPR cases above are not normally
12901 reached because of previous transformations. */
12906 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12908 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12912 arg1 = const_binop (PLUS_EXPR, arg1,
12913 build_int_cst (TREE_TYPE (arg1), 1));
12914 return fold_build2_loc (loc, EQ_EXPR, type,
12915 fold_convert_loc (loc,
12916 TREE_TYPE (arg1), arg0),
12919 arg1 = const_binop (PLUS_EXPR, arg1,
12920 build_int_cst (TREE_TYPE (arg1), 1));
12921 return fold_build2_loc (loc, NE_EXPR, type,
12922 fold_convert_loc (loc, TREE_TYPE (arg1),
12928 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12930 && TREE_INT_CST_LOW (arg1) == min_lo)
12934 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12937 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12940 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12943 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12948 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12950 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12954 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12955 return fold_build2_loc (loc, NE_EXPR, type,
12956 fold_convert_loc (loc,
12957 TREE_TYPE (arg1), arg0),
12960 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12961 return fold_build2_loc (loc, EQ_EXPR, type,
12962 fold_convert_loc (loc, TREE_TYPE (arg1),
12969 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12970 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12971 && TYPE_UNSIGNED (arg1_type)
12972 /* We will flip the signedness of the comparison operator
12973 associated with the mode of arg1, so the sign bit is
12974 specified by this mode. Check that arg1 is the signed
12975 max associated with this sign bit. */
12976 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12977 /* signed_type does not work on pointer types. */
12978 && INTEGRAL_TYPE_P (arg1_type))
12980 /* The following case also applies to X < signed_max+1
12981 and X >= signed_max+1 because previous transformations. */
12982 if (code == LE_EXPR || code == GT_EXPR)
12985 st = signed_type_for (TREE_TYPE (arg1));
12986 return fold_build2_loc (loc,
12987 code == LE_EXPR ? GE_EXPR : LT_EXPR,
12988 type, fold_convert_loc (loc, st, arg0),
12989 build_int_cst (st, 0));
12995 /* If we are comparing an ABS_EXPR with a constant, we can
12996 convert all the cases into explicit comparisons, but they may
12997 well not be faster than doing the ABS and one comparison.
12998 But ABS (X) <= C is a range comparison, which becomes a subtraction
12999 and a comparison, and is probably faster. */
13000 if (code == LE_EXPR
13001 && TREE_CODE (arg1) == INTEGER_CST
13002 && TREE_CODE (arg0) == ABS_EXPR
13003 && ! TREE_SIDE_EFFECTS (arg0)
13004 && (0 != (tem = negate_expr (arg1)))
13005 && TREE_CODE (tem) == INTEGER_CST
13006 && !TREE_OVERFLOW (tem))
13007 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13008 build2 (GE_EXPR, type,
13009 TREE_OPERAND (arg0, 0), tem),
13010 build2 (LE_EXPR, type,
13011 TREE_OPERAND (arg0, 0), arg1));
13013 /* Convert ABS_EXPR<x> >= 0 to true. */
13014 strict_overflow_p = false;
13015 if (code == GE_EXPR
13016 && (integer_zerop (arg1)
13017 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13018 && real_zerop (arg1)))
13019 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13021 if (strict_overflow_p)
13022 fold_overflow_warning (("assuming signed overflow does not occur "
13023 "when simplifying comparison of "
13024 "absolute value and zero"),
13025 WARN_STRICT_OVERFLOW_CONDITIONAL);
13026 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13029 /* Convert ABS_EXPR<x> < 0 to false. */
13030 strict_overflow_p = false;
13031 if (code == LT_EXPR
13032 && (integer_zerop (arg1) || real_zerop (arg1))
13033 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13035 if (strict_overflow_p)
13036 fold_overflow_warning (("assuming signed overflow does not occur "
13037 "when simplifying comparison of "
13038 "absolute value and zero"),
13039 WARN_STRICT_OVERFLOW_CONDITIONAL);
13040 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13043 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13044 and similarly for >= into !=. */
13045 if ((code == LT_EXPR || code == GE_EXPR)
13046 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13047 && TREE_CODE (arg1) == LSHIFT_EXPR
13048 && integer_onep (TREE_OPERAND (arg1, 0)))
13049 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13050 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13051 TREE_OPERAND (arg1, 1)),
13052 build_int_cst (TREE_TYPE (arg0), 0));
13054 if ((code == LT_EXPR || code == GE_EXPR)
13055 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13056 && CONVERT_EXPR_P (arg1)
13057 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13058 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13060 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13061 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13062 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13063 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13064 build_int_cst (TREE_TYPE (arg0), 0));
13069 case UNORDERED_EXPR:
13077 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13079 t1 = fold_relational_const (code, type, arg0, arg1);
13080 if (t1 != NULL_TREE)
13084 /* If the first operand is NaN, the result is constant. */
13085 if (TREE_CODE (arg0) == REAL_CST
13086 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13087 && (code != LTGT_EXPR || ! flag_trapping_math))
13089 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13090 ? integer_zero_node
13091 : integer_one_node;
13092 return omit_one_operand_loc (loc, type, t1, arg1);
13095 /* If the second operand is NaN, the result is constant. */
13096 if (TREE_CODE (arg1) == REAL_CST
13097 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13098 && (code != LTGT_EXPR || ! flag_trapping_math))
13100 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13101 ? integer_zero_node
13102 : integer_one_node;
13103 return omit_one_operand_loc (loc, type, t1, arg0);
13106 /* Simplify unordered comparison of something with itself. */
13107 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13108 && operand_equal_p (arg0, arg1, 0))
13109 return constant_boolean_node (1, type);
13111 if (code == LTGT_EXPR
13112 && !flag_trapping_math
13113 && operand_equal_p (arg0, arg1, 0))
13114 return constant_boolean_node (0, type);
13116 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13118 tree targ0 = strip_float_extensions (arg0);
13119 tree targ1 = strip_float_extensions (arg1);
13120 tree newtype = TREE_TYPE (targ0);
13122 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13123 newtype = TREE_TYPE (targ1);
13125 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13126 return fold_build2_loc (loc, code, type,
13127 fold_convert_loc (loc, newtype, targ0),
13128 fold_convert_loc (loc, newtype, targ1));
13133 case COMPOUND_EXPR:
13134 /* When pedantic, a compound expression can be neither an lvalue
13135 nor an integer constant expression. */
13136 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13138 /* Don't let (0, 0) be null pointer constant. */
13139 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13140 : fold_convert_loc (loc, type, arg1);
13141 return pedantic_non_lvalue_loc (loc, tem);
13144 if ((TREE_CODE (arg0) == REAL_CST
13145 && TREE_CODE (arg1) == REAL_CST)
13146 || (TREE_CODE (arg0) == INTEGER_CST
13147 && TREE_CODE (arg1) == INTEGER_CST))
13148 return build_complex (type, arg0, arg1);
13152 /* An ASSERT_EXPR should never be passed to fold_binary. */
13153 gcc_unreachable ();
13157 } /* switch (code) */
13160 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13161 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13165 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13167 switch (TREE_CODE (*tp))
13173 *walk_subtrees = 0;
13175 /* ... fall through ... */
13182 /* Return whether the sub-tree ST contains a label which is accessible from
13183 outside the sub-tree. */
13186 contains_label_p (tree st)
13189 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13192 /* Fold a ternary expression of code CODE and type TYPE with operands
13193 OP0, OP1, and OP2. Return the folded expression if folding is
13194 successful. Otherwise, return NULL_TREE. */
13197 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13198 tree op0, tree op1, tree op2)
13201 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13202 enum tree_code_class kind = TREE_CODE_CLASS (code);
13204 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13205 && TREE_CODE_LENGTH (code) == 3);
13207 /* Strip any conversions that don't change the mode. This is safe
13208 for every expression, except for a comparison expression because
13209 its signedness is derived from its operands. So, in the latter
13210 case, only strip conversions that don't change the signedness.
13212 Note that this is done as an internal manipulation within the
13213 constant folder, in order to find the simplest representation of
13214 the arguments so that their form can be studied. In any cases,
13215 the appropriate type conversions should be put back in the tree
13216 that will get out of the constant folder. */
13237 case COMPONENT_REF:
13238 if (TREE_CODE (arg0) == CONSTRUCTOR
13239 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13241 unsigned HOST_WIDE_INT idx;
13243 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13250 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13251 so all simple results must be passed through pedantic_non_lvalue. */
13252 if (TREE_CODE (arg0) == INTEGER_CST)
13254 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13255 tem = integer_zerop (arg0) ? op2 : op1;
13256 /* Only optimize constant conditions when the selected branch
13257 has the same type as the COND_EXPR. This avoids optimizing
13258 away "c ? x : throw", where the throw has a void type.
13259 Avoid throwing away that operand which contains label. */
13260 if ((!TREE_SIDE_EFFECTS (unused_op)
13261 || !contains_label_p (unused_op))
13262 && (! VOID_TYPE_P (TREE_TYPE (tem))
13263 || VOID_TYPE_P (type)))
13264 return pedantic_non_lvalue_loc (loc, tem);
13267 if (operand_equal_p (arg1, op2, 0))
13268 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13270 /* If we have A op B ? A : C, we may be able to convert this to a
13271 simpler expression, depending on the operation and the values
13272 of B and C. Signed zeros prevent all of these transformations,
13273 for reasons given above each one.
13275 Also try swapping the arguments and inverting the conditional. */
13276 if (COMPARISON_CLASS_P (arg0)
13277 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13278 arg1, TREE_OPERAND (arg0, 1))
13279 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13281 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13286 if (COMPARISON_CLASS_P (arg0)
13287 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13289 TREE_OPERAND (arg0, 1))
13290 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13292 tem = fold_truth_not_expr (loc, arg0);
13293 if (tem && COMPARISON_CLASS_P (tem))
13295 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13301 /* If the second operand is simpler than the third, swap them
13302 since that produces better jump optimization results. */
13303 if (truth_value_p (TREE_CODE (arg0))
13304 && tree_swap_operands_p (op1, op2, false))
13306 /* See if this can be inverted. If it can't, possibly because
13307 it was a floating-point inequality comparison, don't do
13309 tem = fold_truth_not_expr (loc, arg0);
13311 return fold_build3_loc (loc, code, type, tem, op2, op1);
13314 /* Convert A ? 1 : 0 to simply A. */
13315 if (integer_onep (op1)
13316 && integer_zerop (op2)
13317 /* If we try to convert OP0 to our type, the
13318 call to fold will try to move the conversion inside
13319 a COND, which will recurse. In that case, the COND_EXPR
13320 is probably the best choice, so leave it alone. */
13321 && type == TREE_TYPE (arg0))
13322 return pedantic_non_lvalue_loc (loc, arg0);
13324 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13325 over COND_EXPR in cases such as floating point comparisons. */
13326 if (integer_zerop (op1)
13327 && integer_onep (op2)
13328 && truth_value_p (TREE_CODE (arg0)))
13329 return pedantic_non_lvalue_loc (loc,
13330 fold_convert_loc (loc, type,
13331 invert_truthvalue_loc (loc,
13334 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13335 if (TREE_CODE (arg0) == LT_EXPR
13336 && integer_zerop (TREE_OPERAND (arg0, 1))
13337 && integer_zerop (op2)
13338 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13340 /* sign_bit_p only checks ARG1 bits within A's precision.
13341 If <sign bit of A> has wider type than A, bits outside
13342 of A's precision in <sign bit of A> need to be checked.
13343 If they are all 0, this optimization needs to be done
13344 in unsigned A's type, if they are all 1 in signed A's type,
13345 otherwise this can't be done. */
13346 if (TYPE_PRECISION (TREE_TYPE (tem))
13347 < TYPE_PRECISION (TREE_TYPE (arg1))
13348 && TYPE_PRECISION (TREE_TYPE (tem))
13349 < TYPE_PRECISION (type))
13351 unsigned HOST_WIDE_INT mask_lo;
13352 HOST_WIDE_INT mask_hi;
13353 int inner_width, outer_width;
13356 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13357 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13358 if (outer_width > TYPE_PRECISION (type))
13359 outer_width = TYPE_PRECISION (type);
13361 if (outer_width > HOST_BITS_PER_WIDE_INT)
13363 mask_hi = ((unsigned HOST_WIDE_INT) -1
13364 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13370 mask_lo = ((unsigned HOST_WIDE_INT) -1
13371 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13373 if (inner_width > HOST_BITS_PER_WIDE_INT)
13375 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13376 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13380 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13381 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13383 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13384 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13386 tem_type = signed_type_for (TREE_TYPE (tem));
13387 tem = fold_convert_loc (loc, tem_type, tem);
13389 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13390 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13392 tem_type = unsigned_type_for (TREE_TYPE (tem));
13393 tem = fold_convert_loc (loc, tem_type, tem);
13401 fold_convert_loc (loc, type,
13402 fold_build2_loc (loc, BIT_AND_EXPR,
13403 TREE_TYPE (tem), tem,
13404 fold_convert_loc (loc,
13409 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13410 already handled above. */
13411 if (TREE_CODE (arg0) == BIT_AND_EXPR
13412 && integer_onep (TREE_OPERAND (arg0, 1))
13413 && integer_zerop (op2)
13414 && integer_pow2p (arg1))
13416 tree tem = TREE_OPERAND (arg0, 0);
13418 if (TREE_CODE (tem) == RSHIFT_EXPR
13419 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13420 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13421 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13422 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13423 TREE_OPERAND (tem, 0), arg1);
13426 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13427 is probably obsolete because the first operand should be a
13428 truth value (that's why we have the two cases above), but let's
13429 leave it in until we can confirm this for all front-ends. */
13430 if (integer_zerop (op2)
13431 && TREE_CODE (arg0) == NE_EXPR
13432 && integer_zerop (TREE_OPERAND (arg0, 1))
13433 && integer_pow2p (arg1)
13434 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13435 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13436 arg1, OEP_ONLY_CONST))
13437 return pedantic_non_lvalue_loc (loc,
13438 fold_convert_loc (loc, type,
13439 TREE_OPERAND (arg0, 0)));
13441 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13442 if (integer_zerop (op2)
13443 && truth_value_p (TREE_CODE (arg0))
13444 && truth_value_p (TREE_CODE (arg1)))
13445 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13446 fold_convert_loc (loc, type, arg0),
13449 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13450 if (integer_onep (op2)
13451 && truth_value_p (TREE_CODE (arg0))
13452 && truth_value_p (TREE_CODE (arg1)))
13454 /* Only perform transformation if ARG0 is easily inverted. */
13455 tem = fold_truth_not_expr (loc, arg0);
13457 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13458 fold_convert_loc (loc, type, tem),
13462 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13463 if (integer_zerop (arg1)
13464 && truth_value_p (TREE_CODE (arg0))
13465 && truth_value_p (TREE_CODE (op2)))
13467 /* Only perform transformation if ARG0 is easily inverted. */
13468 tem = fold_truth_not_expr (loc, arg0);
13470 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13471 fold_convert_loc (loc, type, tem),
13475 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13476 if (integer_onep (arg1)
13477 && truth_value_p (TREE_CODE (arg0))
13478 && truth_value_p (TREE_CODE (op2)))
13479 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13480 fold_convert_loc (loc, type, arg0),
13486 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13487 of fold_ternary on them. */
13488 gcc_unreachable ();
13490 case BIT_FIELD_REF:
13491 if ((TREE_CODE (arg0) == VECTOR_CST
13492 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13493 && type == TREE_TYPE (TREE_TYPE (arg0)))
13495 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13496 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13499 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13500 && (idx % width) == 0
13501 && (idx = idx / width)
13502 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13504 tree elements = NULL_TREE;
13506 if (TREE_CODE (arg0) == VECTOR_CST)
13507 elements = TREE_VECTOR_CST_ELTS (arg0);
13510 unsigned HOST_WIDE_INT idx;
13513 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13514 elements = tree_cons (NULL_TREE, value, elements);
13516 while (idx-- > 0 && elements)
13517 elements = TREE_CHAIN (elements);
13519 return TREE_VALUE (elements);
13521 return build_zero_cst (type);
13525 /* A bit-field-ref that referenced the full argument can be stripped. */
13526 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13527 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13528 && integer_zerop (op2))
13529 return fold_convert_loc (loc, type, arg0);
13534 /* For integers we can decompose the FMA if possible. */
13535 if (TREE_CODE (arg0) == INTEGER_CST
13536 && TREE_CODE (arg1) == INTEGER_CST)
13537 return fold_build2_loc (loc, PLUS_EXPR, type,
13538 const_binop (MULT_EXPR, arg0, arg1), arg2);
13539 if (integer_zerop (arg2))
13540 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13542 return fold_fma (loc, type, arg0, arg1, arg2);
13546 } /* switch (code) */
13549 /* Perform constant folding and related simplification of EXPR.
13550 The related simplifications include x*1 => x, x*0 => 0, etc.,
13551 and application of the associative law.
13552 NOP_EXPR conversions may be removed freely (as long as we
13553 are careful not to change the type of the overall expression).
13554 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13555 but we can constant-fold them if they have constant operands. */
13557 #ifdef ENABLE_FOLD_CHECKING
13558 # define fold(x) fold_1 (x)
13559 static tree fold_1 (tree);
13565 const tree t = expr;
13566 enum tree_code code = TREE_CODE (t);
13567 enum tree_code_class kind = TREE_CODE_CLASS (code);
13569 location_t loc = EXPR_LOCATION (expr);
13571 /* Return right away if a constant. */
13572 if (kind == tcc_constant)
13575 /* CALL_EXPR-like objects with variable numbers of operands are
13576 treated specially. */
13577 if (kind == tcc_vl_exp)
13579 if (code == CALL_EXPR)
13581 tem = fold_call_expr (loc, expr, false);
13582 return tem ? tem : expr;
13587 if (IS_EXPR_CODE_CLASS (kind))
13589 tree type = TREE_TYPE (t);
13590 tree op0, op1, op2;
13592 switch (TREE_CODE_LENGTH (code))
13595 op0 = TREE_OPERAND (t, 0);
13596 tem = fold_unary_loc (loc, code, type, op0);
13597 return tem ? tem : expr;
13599 op0 = TREE_OPERAND (t, 0);
13600 op1 = TREE_OPERAND (t, 1);
13601 tem = fold_binary_loc (loc, code, type, op0, op1);
13602 return tem ? tem : expr;
13604 op0 = TREE_OPERAND (t, 0);
13605 op1 = TREE_OPERAND (t, 1);
13606 op2 = TREE_OPERAND (t, 2);
13607 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13608 return tem ? tem : expr;
13618 tree op0 = TREE_OPERAND (t, 0);
13619 tree op1 = TREE_OPERAND (t, 1);
13621 if (TREE_CODE (op1) == INTEGER_CST
13622 && TREE_CODE (op0) == CONSTRUCTOR
13623 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13625 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13626 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13627 unsigned HOST_WIDE_INT begin = 0;
13629 /* Find a matching index by means of a binary search. */
13630 while (begin != end)
13632 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13633 tree index = VEC_index (constructor_elt, elts, middle)->index;
13635 if (TREE_CODE (index) == INTEGER_CST
13636 && tree_int_cst_lt (index, op1))
13637 begin = middle + 1;
13638 else if (TREE_CODE (index) == INTEGER_CST
13639 && tree_int_cst_lt (op1, index))
13641 else if (TREE_CODE (index) == RANGE_EXPR
13642 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13643 begin = middle + 1;
13644 else if (TREE_CODE (index) == RANGE_EXPR
13645 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13648 return VEC_index (constructor_elt, elts, middle)->value;
13656 return fold (DECL_INITIAL (t));
13660 } /* switch (code) */
13663 #ifdef ENABLE_FOLD_CHECKING
13666 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13667 static void fold_check_failed (const_tree, const_tree);
13668 void print_fold_checksum (const_tree);
13670 /* When --enable-checking=fold, compute a digest of expr before
13671 and after actual fold call to see if fold did not accidentally
13672 change original expr. */
13678 struct md5_ctx ctx;
13679 unsigned char checksum_before[16], checksum_after[16];
13682 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13683 md5_init_ctx (&ctx);
13684 fold_checksum_tree (expr, &ctx, ht);
13685 md5_finish_ctx (&ctx, checksum_before);
13688 ret = fold_1 (expr);
13690 md5_init_ctx (&ctx);
13691 fold_checksum_tree (expr, &ctx, ht);
13692 md5_finish_ctx (&ctx, checksum_after);
13695 if (memcmp (checksum_before, checksum_after, 16))
13696 fold_check_failed (expr, ret);
13702 print_fold_checksum (const_tree expr)
13704 struct md5_ctx ctx;
13705 unsigned char checksum[16], cnt;
13708 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13709 md5_init_ctx (&ctx);
13710 fold_checksum_tree (expr, &ctx, ht);
13711 md5_finish_ctx (&ctx, checksum);
13713 for (cnt = 0; cnt < 16; ++cnt)
13714 fprintf (stderr, "%02x", checksum[cnt]);
13715 putc ('\n', stderr);
13719 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13721 internal_error ("fold check: original tree changed by fold");
13725 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13728 enum tree_code code;
13729 union tree_node buf;
13734 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13735 <= sizeof (struct tree_function_decl))
13736 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13739 slot = (void **) htab_find_slot (ht, expr, INSERT);
13742 *slot = CONST_CAST_TREE (expr);
13743 code = TREE_CODE (expr);
13744 if (TREE_CODE_CLASS (code) == tcc_declaration
13745 && DECL_ASSEMBLER_NAME_SET_P (expr))
13747 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13748 memcpy ((char *) &buf, expr, tree_size (expr));
13749 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13750 expr = (tree) &buf;
13752 else if (TREE_CODE_CLASS (code) == tcc_type
13753 && (TYPE_POINTER_TO (expr)
13754 || TYPE_REFERENCE_TO (expr)
13755 || TYPE_CACHED_VALUES_P (expr)
13756 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13757 || TYPE_NEXT_VARIANT (expr)))
13759 /* Allow these fields to be modified. */
13761 memcpy ((char *) &buf, expr, tree_size (expr));
13762 expr = tmp = (tree) &buf;
13763 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13764 TYPE_POINTER_TO (tmp) = NULL;
13765 TYPE_REFERENCE_TO (tmp) = NULL;
13766 TYPE_NEXT_VARIANT (tmp) = NULL;
13767 if (TYPE_CACHED_VALUES_P (tmp))
13769 TYPE_CACHED_VALUES_P (tmp) = 0;
13770 TYPE_CACHED_VALUES (tmp) = NULL;
13773 md5_process_bytes (expr, tree_size (expr), ctx);
13774 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13775 if (TREE_CODE_CLASS (code) != tcc_type
13776 && TREE_CODE_CLASS (code) != tcc_declaration
13777 && code != TREE_LIST
13778 && code != SSA_NAME)
13779 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13780 switch (TREE_CODE_CLASS (code))
13786 md5_process_bytes (TREE_STRING_POINTER (expr),
13787 TREE_STRING_LENGTH (expr), ctx);
13790 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13791 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13794 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13800 case tcc_exceptional:
13804 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13805 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13806 expr = TREE_CHAIN (expr);
13807 goto recursive_label;
13810 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13811 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13817 case tcc_expression:
13818 case tcc_reference:
13819 case tcc_comparison:
13822 case tcc_statement:
13824 len = TREE_OPERAND_LENGTH (expr);
13825 for (i = 0; i < len; ++i)
13826 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13828 case tcc_declaration:
13829 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13830 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13831 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13833 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13834 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13835 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13836 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13837 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13839 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13840 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13842 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13844 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13845 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13846 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13850 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13851 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13852 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13853 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13854 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13855 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13856 if (INTEGRAL_TYPE_P (expr)
13857 || SCALAR_FLOAT_TYPE_P (expr))
13859 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13860 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13862 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13863 if (TREE_CODE (expr) == RECORD_TYPE
13864 || TREE_CODE (expr) == UNION_TYPE
13865 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13866 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13867 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13874 /* Helper function for outputting the checksum of a tree T. When
13875 debugging with gdb, you can "define mynext" to be "next" followed
13876 by "call debug_fold_checksum (op0)", then just trace down till the
13879 DEBUG_FUNCTION void
13880 debug_fold_checksum (const_tree t)
13883 unsigned char checksum[16];
13884 struct md5_ctx ctx;
13885 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13887 md5_init_ctx (&ctx);
13888 fold_checksum_tree (t, &ctx, ht);
13889 md5_finish_ctx (&ctx, checksum);
13892 for (i = 0; i < 16; i++)
13893 fprintf (stderr, "%d ", checksum[i]);
13895 fprintf (stderr, "\n");
13900 /* Fold a unary tree expression with code CODE of type TYPE with an
13901 operand OP0. LOC is the location of the resulting expression.
13902 Return a folded expression if successful. Otherwise, return a tree
13903 expression with code CODE of type TYPE with an operand OP0. */
13906 fold_build1_stat_loc (location_t loc,
13907 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13910 #ifdef ENABLE_FOLD_CHECKING
13911 unsigned char checksum_before[16], checksum_after[16];
13912 struct md5_ctx ctx;
13915 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13916 md5_init_ctx (&ctx);
13917 fold_checksum_tree (op0, &ctx, ht);
13918 md5_finish_ctx (&ctx, checksum_before);
13922 tem = fold_unary_loc (loc, code, type, op0);
13924 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
13926 #ifdef ENABLE_FOLD_CHECKING
13927 md5_init_ctx (&ctx);
13928 fold_checksum_tree (op0, &ctx, ht);
13929 md5_finish_ctx (&ctx, checksum_after);
13932 if (memcmp (checksum_before, checksum_after, 16))
13933 fold_check_failed (op0, tem);
13938 /* Fold a binary tree expression with code CODE of type TYPE with
13939 operands OP0 and OP1. LOC is the location of the resulting
13940 expression. Return a folded expression if successful. Otherwise,
13941 return a tree expression with code CODE of type TYPE with operands
13945 fold_build2_stat_loc (location_t loc,
13946 enum tree_code code, tree type, tree op0, tree op1
13950 #ifdef ENABLE_FOLD_CHECKING
13951 unsigned char checksum_before_op0[16],
13952 checksum_before_op1[16],
13953 checksum_after_op0[16],
13954 checksum_after_op1[16];
13955 struct md5_ctx ctx;
13958 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13959 md5_init_ctx (&ctx);
13960 fold_checksum_tree (op0, &ctx, ht);
13961 md5_finish_ctx (&ctx, checksum_before_op0);
13964 md5_init_ctx (&ctx);
13965 fold_checksum_tree (op1, &ctx, ht);
13966 md5_finish_ctx (&ctx, checksum_before_op1);
13970 tem = fold_binary_loc (loc, code, type, op0, op1);
13972 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13974 #ifdef ENABLE_FOLD_CHECKING
13975 md5_init_ctx (&ctx);
13976 fold_checksum_tree (op0, &ctx, ht);
13977 md5_finish_ctx (&ctx, checksum_after_op0);
13980 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13981 fold_check_failed (op0, tem);
13983 md5_init_ctx (&ctx);
13984 fold_checksum_tree (op1, &ctx, ht);
13985 md5_finish_ctx (&ctx, checksum_after_op1);
13988 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13989 fold_check_failed (op1, tem);
13994 /* Fold a ternary tree expression with code CODE of type TYPE with
13995 operands OP0, OP1, and OP2. Return a folded expression if
13996 successful. Otherwise, return a tree expression with code CODE of
13997 type TYPE with operands OP0, OP1, and OP2. */
14000 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14001 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14004 #ifdef ENABLE_FOLD_CHECKING
14005 unsigned char checksum_before_op0[16],
14006 checksum_before_op1[16],
14007 checksum_before_op2[16],
14008 checksum_after_op0[16],
14009 checksum_after_op1[16],
14010 checksum_after_op2[16];
14011 struct md5_ctx ctx;
14014 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14015 md5_init_ctx (&ctx);
14016 fold_checksum_tree (op0, &ctx, ht);
14017 md5_finish_ctx (&ctx, checksum_before_op0);
14020 md5_init_ctx (&ctx);
14021 fold_checksum_tree (op1, &ctx, ht);
14022 md5_finish_ctx (&ctx, checksum_before_op1);
14025 md5_init_ctx (&ctx);
14026 fold_checksum_tree (op2, &ctx, ht);
14027 md5_finish_ctx (&ctx, checksum_before_op2);
14031 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14032 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14034 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14036 #ifdef ENABLE_FOLD_CHECKING
14037 md5_init_ctx (&ctx);
14038 fold_checksum_tree (op0, &ctx, ht);
14039 md5_finish_ctx (&ctx, checksum_after_op0);
14042 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14043 fold_check_failed (op0, tem);
14045 md5_init_ctx (&ctx);
14046 fold_checksum_tree (op1, &ctx, ht);
14047 md5_finish_ctx (&ctx, checksum_after_op1);
14050 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14051 fold_check_failed (op1, tem);
14053 md5_init_ctx (&ctx);
14054 fold_checksum_tree (op2, &ctx, ht);
14055 md5_finish_ctx (&ctx, checksum_after_op2);
14058 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14059 fold_check_failed (op2, tem);
14064 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14065 arguments in ARGARRAY, and a null static chain.
14066 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14067 of type TYPE from the given operands as constructed by build_call_array. */
14070 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14071 int nargs, tree *argarray)
14074 #ifdef ENABLE_FOLD_CHECKING
14075 unsigned char checksum_before_fn[16],
14076 checksum_before_arglist[16],
14077 checksum_after_fn[16],
14078 checksum_after_arglist[16];
14079 struct md5_ctx ctx;
14083 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14084 md5_init_ctx (&ctx);
14085 fold_checksum_tree (fn, &ctx, ht);
14086 md5_finish_ctx (&ctx, checksum_before_fn);
14089 md5_init_ctx (&ctx);
14090 for (i = 0; i < nargs; i++)
14091 fold_checksum_tree (argarray[i], &ctx, ht);
14092 md5_finish_ctx (&ctx, checksum_before_arglist);
14096 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14098 #ifdef ENABLE_FOLD_CHECKING
14099 md5_init_ctx (&ctx);
14100 fold_checksum_tree (fn, &ctx, ht);
14101 md5_finish_ctx (&ctx, checksum_after_fn);
14104 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14105 fold_check_failed (fn, tem);
14107 md5_init_ctx (&ctx);
14108 for (i = 0; i < nargs; i++)
14109 fold_checksum_tree (argarray[i], &ctx, ht);
14110 md5_finish_ctx (&ctx, checksum_after_arglist);
14113 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14114 fold_check_failed (NULL_TREE, tem);
14119 /* Perform constant folding and related simplification of initializer
14120 expression EXPR. These behave identically to "fold_buildN" but ignore
14121 potential run-time traps and exceptions that fold must preserve. */
14123 #define START_FOLD_INIT \
14124 int saved_signaling_nans = flag_signaling_nans;\
14125 int saved_trapping_math = flag_trapping_math;\
14126 int saved_rounding_math = flag_rounding_math;\
14127 int saved_trapv = flag_trapv;\
14128 int saved_folding_initializer = folding_initializer;\
14129 flag_signaling_nans = 0;\
14130 flag_trapping_math = 0;\
14131 flag_rounding_math = 0;\
14133 folding_initializer = 1;
14135 #define END_FOLD_INIT \
14136 flag_signaling_nans = saved_signaling_nans;\
14137 flag_trapping_math = saved_trapping_math;\
14138 flag_rounding_math = saved_rounding_math;\
14139 flag_trapv = saved_trapv;\
14140 folding_initializer = saved_folding_initializer;
14143 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14144 tree type, tree op)
14149 result = fold_build1_loc (loc, code, type, op);
14156 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14157 tree type, tree op0, tree op1)
14162 result = fold_build2_loc (loc, code, type, op0, op1);
14169 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14170 tree type, tree op0, tree op1, tree op2)
14175 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14182 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14183 int nargs, tree *argarray)
14188 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14194 #undef START_FOLD_INIT
14195 #undef END_FOLD_INIT
14197 /* Determine if first argument is a multiple of second argument. Return 0 if
14198 it is not, or we cannot easily determined it to be.
14200 An example of the sort of thing we care about (at this point; this routine
14201 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14202 fold cases do now) is discovering that
14204 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14210 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14212 This code also handles discovering that
14214 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14216 is a multiple of 8 so we don't have to worry about dealing with a
14217 possible remainder.
14219 Note that we *look* inside a SAVE_EXPR only to determine how it was
14220 calculated; it is not safe for fold to do much of anything else with the
14221 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14222 at run time. For example, the latter example above *cannot* be implemented
14223 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14224 evaluation time of the original SAVE_EXPR is not necessarily the same at
14225 the time the new expression is evaluated. The only optimization of this
14226 sort that would be valid is changing
14228 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14232 SAVE_EXPR (I) * SAVE_EXPR (J)
14234 (where the same SAVE_EXPR (J) is used in the original and the
14235 transformed version). */
14238 multiple_of_p (tree type, const_tree top, const_tree bottom)
14240 if (operand_equal_p (top, bottom, 0))
14243 if (TREE_CODE (type) != INTEGER_TYPE)
14246 switch (TREE_CODE (top))
14249 /* Bitwise and provides a power of two multiple. If the mask is
14250 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14251 if (!integer_pow2p (bottom))
14256 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14257 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14261 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14262 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14265 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14269 op1 = TREE_OPERAND (top, 1);
14270 /* const_binop may not detect overflow correctly,
14271 so check for it explicitly here. */
14272 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14273 > TREE_INT_CST_LOW (op1)
14274 && TREE_INT_CST_HIGH (op1) == 0
14275 && 0 != (t1 = fold_convert (type,
14276 const_binop (LSHIFT_EXPR,
14279 && !TREE_OVERFLOW (t1))
14280 return multiple_of_p (type, t1, bottom);
14285 /* Can't handle conversions from non-integral or wider integral type. */
14286 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14287 || (TYPE_PRECISION (type)
14288 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14291 /* .. fall through ... */
14294 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14297 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14298 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14301 if (TREE_CODE (bottom) != INTEGER_CST
14302 || integer_zerop (bottom)
14303 || (TYPE_UNSIGNED (type)
14304 && (tree_int_cst_sgn (top) < 0
14305 || tree_int_cst_sgn (bottom) < 0)))
14307 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14315 /* Return true if CODE or TYPE is known to be non-negative. */
14318 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14320 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14321 && truth_value_p (code))
14322 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14323 have a signed:1 type (where the value is -1 and 0). */
14328 /* Return true if (CODE OP0) is known to be non-negative. If the return
14329 value is based on the assumption that signed overflow is undefined,
14330 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14331 *STRICT_OVERFLOW_P. */
14334 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14335 bool *strict_overflow_p)
14337 if (TYPE_UNSIGNED (type))
14343 /* We can't return 1 if flag_wrapv is set because
14344 ABS_EXPR<INT_MIN> = INT_MIN. */
14345 if (!INTEGRAL_TYPE_P (type))
14347 if (TYPE_OVERFLOW_UNDEFINED (type))
14349 *strict_overflow_p = true;
14354 case NON_LVALUE_EXPR:
14356 case FIX_TRUNC_EXPR:
14357 return tree_expr_nonnegative_warnv_p (op0,
14358 strict_overflow_p);
14362 tree inner_type = TREE_TYPE (op0);
14363 tree outer_type = type;
14365 if (TREE_CODE (outer_type) == REAL_TYPE)
14367 if (TREE_CODE (inner_type) == REAL_TYPE)
14368 return tree_expr_nonnegative_warnv_p (op0,
14369 strict_overflow_p);
14370 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14372 if (TYPE_UNSIGNED (inner_type))
14374 return tree_expr_nonnegative_warnv_p (op0,
14375 strict_overflow_p);
14378 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14380 if (TREE_CODE (inner_type) == REAL_TYPE)
14381 return tree_expr_nonnegative_warnv_p (op0,
14382 strict_overflow_p);
14383 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14384 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14385 && TYPE_UNSIGNED (inner_type);
14391 return tree_simple_nonnegative_warnv_p (code, type);
14394 /* We don't know sign of `t', so be conservative and return false. */
14398 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14399 value is based on the assumption that signed overflow is undefined,
14400 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14401 *STRICT_OVERFLOW_P. */
14404 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14405 tree op1, bool *strict_overflow_p)
14407 if (TYPE_UNSIGNED (type))
14412 case POINTER_PLUS_EXPR:
14414 if (FLOAT_TYPE_P (type))
14415 return (tree_expr_nonnegative_warnv_p (op0,
14417 && tree_expr_nonnegative_warnv_p (op1,
14418 strict_overflow_p));
14420 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14421 both unsigned and at least 2 bits shorter than the result. */
14422 if (TREE_CODE (type) == INTEGER_TYPE
14423 && TREE_CODE (op0) == NOP_EXPR
14424 && TREE_CODE (op1) == NOP_EXPR)
14426 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14427 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14428 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14429 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14431 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14432 TYPE_PRECISION (inner2)) + 1;
14433 return prec < TYPE_PRECISION (type);
14439 if (FLOAT_TYPE_P (type))
14441 /* x * x for floating point x is always non-negative. */
14442 if (operand_equal_p (op0, op1, 0))
14444 return (tree_expr_nonnegative_warnv_p (op0,
14446 && tree_expr_nonnegative_warnv_p (op1,
14447 strict_overflow_p));
14450 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14451 both unsigned and their total bits is shorter than the result. */
14452 if (TREE_CODE (type) == INTEGER_TYPE
14453 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14454 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14456 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14457 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14459 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14460 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14463 bool unsigned0 = TYPE_UNSIGNED (inner0);
14464 bool unsigned1 = TYPE_UNSIGNED (inner1);
14466 if (TREE_CODE (op0) == INTEGER_CST)
14467 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14469 if (TREE_CODE (op1) == INTEGER_CST)
14470 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14472 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14473 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14475 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14476 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14477 : TYPE_PRECISION (inner0);
14479 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14480 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14481 : TYPE_PRECISION (inner1);
14483 return precision0 + precision1 < TYPE_PRECISION (type);
14490 return (tree_expr_nonnegative_warnv_p (op0,
14492 || tree_expr_nonnegative_warnv_p (op1,
14493 strict_overflow_p));
14499 case TRUNC_DIV_EXPR:
14500 case CEIL_DIV_EXPR:
14501 case FLOOR_DIV_EXPR:
14502 case ROUND_DIV_EXPR:
14503 return (tree_expr_nonnegative_warnv_p (op0,
14505 && tree_expr_nonnegative_warnv_p (op1,
14506 strict_overflow_p));
14508 case TRUNC_MOD_EXPR:
14509 case CEIL_MOD_EXPR:
14510 case FLOOR_MOD_EXPR:
14511 case ROUND_MOD_EXPR:
14512 return tree_expr_nonnegative_warnv_p (op0,
14513 strict_overflow_p);
14515 return tree_simple_nonnegative_warnv_p (code, type);
14518 /* We don't know sign of `t', so be conservative and return false. */
14522 /* Return true if T is known to be non-negative. If the return
14523 value is based on the assumption that signed overflow is undefined,
14524 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14525 *STRICT_OVERFLOW_P. */
14528 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14530 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14533 switch (TREE_CODE (t))
14536 return tree_int_cst_sgn (t) >= 0;
14539 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14542 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14545 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14547 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14548 strict_overflow_p));
14550 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14553 /* We don't know sign of `t', so be conservative and return false. */
14557 /* Return true if T is known to be non-negative. If the return
14558 value is based on the assumption that signed overflow is undefined,
14559 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14560 *STRICT_OVERFLOW_P. */
14563 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14564 tree arg0, tree arg1, bool *strict_overflow_p)
14566 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14567 switch (DECL_FUNCTION_CODE (fndecl))
14569 CASE_FLT_FN (BUILT_IN_ACOS):
14570 CASE_FLT_FN (BUILT_IN_ACOSH):
14571 CASE_FLT_FN (BUILT_IN_CABS):
14572 CASE_FLT_FN (BUILT_IN_COSH):
14573 CASE_FLT_FN (BUILT_IN_ERFC):
14574 CASE_FLT_FN (BUILT_IN_EXP):
14575 CASE_FLT_FN (BUILT_IN_EXP10):
14576 CASE_FLT_FN (BUILT_IN_EXP2):
14577 CASE_FLT_FN (BUILT_IN_FABS):
14578 CASE_FLT_FN (BUILT_IN_FDIM):
14579 CASE_FLT_FN (BUILT_IN_HYPOT):
14580 CASE_FLT_FN (BUILT_IN_POW10):
14581 CASE_INT_FN (BUILT_IN_FFS):
14582 CASE_INT_FN (BUILT_IN_PARITY):
14583 CASE_INT_FN (BUILT_IN_POPCOUNT):
14584 case BUILT_IN_BSWAP32:
14585 case BUILT_IN_BSWAP64:
14589 CASE_FLT_FN (BUILT_IN_SQRT):
14590 /* sqrt(-0.0) is -0.0. */
14591 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14593 return tree_expr_nonnegative_warnv_p (arg0,
14594 strict_overflow_p);
14596 CASE_FLT_FN (BUILT_IN_ASINH):
14597 CASE_FLT_FN (BUILT_IN_ATAN):
14598 CASE_FLT_FN (BUILT_IN_ATANH):
14599 CASE_FLT_FN (BUILT_IN_CBRT):
14600 CASE_FLT_FN (BUILT_IN_CEIL):
14601 CASE_FLT_FN (BUILT_IN_ERF):
14602 CASE_FLT_FN (BUILT_IN_EXPM1):
14603 CASE_FLT_FN (BUILT_IN_FLOOR):
14604 CASE_FLT_FN (BUILT_IN_FMOD):
14605 CASE_FLT_FN (BUILT_IN_FREXP):
14606 CASE_FLT_FN (BUILT_IN_LCEIL):
14607 CASE_FLT_FN (BUILT_IN_LDEXP):
14608 CASE_FLT_FN (BUILT_IN_LFLOOR):
14609 CASE_FLT_FN (BUILT_IN_LLCEIL):
14610 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14611 CASE_FLT_FN (BUILT_IN_LLRINT):
14612 CASE_FLT_FN (BUILT_IN_LLROUND):
14613 CASE_FLT_FN (BUILT_IN_LRINT):
14614 CASE_FLT_FN (BUILT_IN_LROUND):
14615 CASE_FLT_FN (BUILT_IN_MODF):
14616 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14617 CASE_FLT_FN (BUILT_IN_RINT):
14618 CASE_FLT_FN (BUILT_IN_ROUND):
14619 CASE_FLT_FN (BUILT_IN_SCALB):
14620 CASE_FLT_FN (BUILT_IN_SCALBLN):
14621 CASE_FLT_FN (BUILT_IN_SCALBN):
14622 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14623 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14624 CASE_FLT_FN (BUILT_IN_SINH):
14625 CASE_FLT_FN (BUILT_IN_TANH):
14626 CASE_FLT_FN (BUILT_IN_TRUNC):
14627 /* True if the 1st argument is nonnegative. */
14628 return tree_expr_nonnegative_warnv_p (arg0,
14629 strict_overflow_p);
14631 CASE_FLT_FN (BUILT_IN_FMAX):
14632 /* True if the 1st OR 2nd arguments are nonnegative. */
14633 return (tree_expr_nonnegative_warnv_p (arg0,
14635 || (tree_expr_nonnegative_warnv_p (arg1,
14636 strict_overflow_p)));
14638 CASE_FLT_FN (BUILT_IN_FMIN):
14639 /* True if the 1st AND 2nd arguments are nonnegative. */
14640 return (tree_expr_nonnegative_warnv_p (arg0,
14642 && (tree_expr_nonnegative_warnv_p (arg1,
14643 strict_overflow_p)));
14645 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14646 /* True if the 2nd argument is nonnegative. */
14647 return tree_expr_nonnegative_warnv_p (arg1,
14648 strict_overflow_p);
14650 CASE_FLT_FN (BUILT_IN_POWI):
14651 /* True if the 1st argument is nonnegative or the second
14652 argument is an even integer. */
14653 if (TREE_CODE (arg1) == INTEGER_CST
14654 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14656 return tree_expr_nonnegative_warnv_p (arg0,
14657 strict_overflow_p);
14659 CASE_FLT_FN (BUILT_IN_POW):
14660 /* True if the 1st argument is nonnegative or the second
14661 argument is an even integer valued real. */
14662 if (TREE_CODE (arg1) == REAL_CST)
14667 c = TREE_REAL_CST (arg1);
14668 n = real_to_integer (&c);
14671 REAL_VALUE_TYPE cint;
14672 real_from_integer (&cint, VOIDmode, n,
14673 n < 0 ? -1 : 0, 0);
14674 if (real_identical (&c, &cint))
14678 return tree_expr_nonnegative_warnv_p (arg0,
14679 strict_overflow_p);
14684 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14688 /* Return true if T is known to be non-negative. If the return
14689 value is based on the assumption that signed overflow is undefined,
14690 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14691 *STRICT_OVERFLOW_P. */
14694 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14696 enum tree_code code = TREE_CODE (t);
14697 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14704 tree temp = TARGET_EXPR_SLOT (t);
14705 t = TARGET_EXPR_INITIAL (t);
14707 /* If the initializer is non-void, then it's a normal expression
14708 that will be assigned to the slot. */
14709 if (!VOID_TYPE_P (t))
14710 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14712 /* Otherwise, the initializer sets the slot in some way. One common
14713 way is an assignment statement at the end of the initializer. */
14716 if (TREE_CODE (t) == BIND_EXPR)
14717 t = expr_last (BIND_EXPR_BODY (t));
14718 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14719 || TREE_CODE (t) == TRY_CATCH_EXPR)
14720 t = expr_last (TREE_OPERAND (t, 0));
14721 else if (TREE_CODE (t) == STATEMENT_LIST)
14726 if (TREE_CODE (t) == MODIFY_EXPR
14727 && TREE_OPERAND (t, 0) == temp)
14728 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14729 strict_overflow_p);
14736 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14737 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14739 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14740 get_callee_fndecl (t),
14743 strict_overflow_p);
14745 case COMPOUND_EXPR:
14747 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14748 strict_overflow_p);
14750 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14751 strict_overflow_p);
14753 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14754 strict_overflow_p);
14757 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14761 /* We don't know sign of `t', so be conservative and return false. */
14765 /* Return true if T is known to be non-negative. If the return
14766 value is based on the assumption that signed overflow is undefined,
14767 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14768 *STRICT_OVERFLOW_P. */
14771 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14773 enum tree_code code;
14774 if (t == error_mark_node)
14777 code = TREE_CODE (t);
14778 switch (TREE_CODE_CLASS (code))
14781 case tcc_comparison:
14782 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14784 TREE_OPERAND (t, 0),
14785 TREE_OPERAND (t, 1),
14786 strict_overflow_p);
14789 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14791 TREE_OPERAND (t, 0),
14792 strict_overflow_p);
14795 case tcc_declaration:
14796 case tcc_reference:
14797 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14805 case TRUTH_AND_EXPR:
14806 case TRUTH_OR_EXPR:
14807 case TRUTH_XOR_EXPR:
14808 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14810 TREE_OPERAND (t, 0),
14811 TREE_OPERAND (t, 1),
14812 strict_overflow_p);
14813 case TRUTH_NOT_EXPR:
14814 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14816 TREE_OPERAND (t, 0),
14817 strict_overflow_p);
14824 case WITH_SIZE_EXPR:
14826 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14829 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14833 /* Return true if `t' is known to be non-negative. Handle warnings
14834 about undefined signed overflow. */
14837 tree_expr_nonnegative_p (tree t)
14839 bool ret, strict_overflow_p;
14841 strict_overflow_p = false;
14842 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14843 if (strict_overflow_p)
14844 fold_overflow_warning (("assuming signed overflow does not occur when "
14845 "determining that expression is always "
14847 WARN_STRICT_OVERFLOW_MISC);
14852 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14853 For floating point we further ensure that T is not denormal.
14854 Similar logic is present in nonzero_address in rtlanal.h.
14856 If the return value is based on the assumption that signed overflow
14857 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14858 change *STRICT_OVERFLOW_P. */
14861 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14862 bool *strict_overflow_p)
14867 return tree_expr_nonzero_warnv_p (op0,
14868 strict_overflow_p);
14872 tree inner_type = TREE_TYPE (op0);
14873 tree outer_type = type;
14875 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14876 && tree_expr_nonzero_warnv_p (op0,
14877 strict_overflow_p));
14881 case NON_LVALUE_EXPR:
14882 return tree_expr_nonzero_warnv_p (op0,
14883 strict_overflow_p);
14892 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14893 For floating point we further ensure that T is not denormal.
14894 Similar logic is present in nonzero_address in rtlanal.h.
14896 If the return value is based on the assumption that signed overflow
14897 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14898 change *STRICT_OVERFLOW_P. */
14901 tree_binary_nonzero_warnv_p (enum tree_code code,
14904 tree op1, bool *strict_overflow_p)
14906 bool sub_strict_overflow_p;
14909 case POINTER_PLUS_EXPR:
14911 if (TYPE_OVERFLOW_UNDEFINED (type))
14913 /* With the presence of negative values it is hard
14914 to say something. */
14915 sub_strict_overflow_p = false;
14916 if (!tree_expr_nonnegative_warnv_p (op0,
14917 &sub_strict_overflow_p)
14918 || !tree_expr_nonnegative_warnv_p (op1,
14919 &sub_strict_overflow_p))
14921 /* One of operands must be positive and the other non-negative. */
14922 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14923 overflows, on a twos-complement machine the sum of two
14924 nonnegative numbers can never be zero. */
14925 return (tree_expr_nonzero_warnv_p (op0,
14927 || tree_expr_nonzero_warnv_p (op1,
14928 strict_overflow_p));
14933 if (TYPE_OVERFLOW_UNDEFINED (type))
14935 if (tree_expr_nonzero_warnv_p (op0,
14937 && tree_expr_nonzero_warnv_p (op1,
14938 strict_overflow_p))
14940 *strict_overflow_p = true;
14947 sub_strict_overflow_p = false;
14948 if (tree_expr_nonzero_warnv_p (op0,
14949 &sub_strict_overflow_p)
14950 && tree_expr_nonzero_warnv_p (op1,
14951 &sub_strict_overflow_p))
14953 if (sub_strict_overflow_p)
14954 *strict_overflow_p = true;
14959 sub_strict_overflow_p = false;
14960 if (tree_expr_nonzero_warnv_p (op0,
14961 &sub_strict_overflow_p))
14963 if (sub_strict_overflow_p)
14964 *strict_overflow_p = true;
14966 /* When both operands are nonzero, then MAX must be too. */
14967 if (tree_expr_nonzero_warnv_p (op1,
14968 strict_overflow_p))
14971 /* MAX where operand 0 is positive is positive. */
14972 return tree_expr_nonnegative_warnv_p (op0,
14973 strict_overflow_p);
14975 /* MAX where operand 1 is positive is positive. */
14976 else if (tree_expr_nonzero_warnv_p (op1,
14977 &sub_strict_overflow_p)
14978 && tree_expr_nonnegative_warnv_p (op1,
14979 &sub_strict_overflow_p))
14981 if (sub_strict_overflow_p)
14982 *strict_overflow_p = true;
14988 return (tree_expr_nonzero_warnv_p (op1,
14990 || tree_expr_nonzero_warnv_p (op0,
14991 strict_overflow_p));
15000 /* Return true when T is an address and is known to be nonzero.
15001 For floating point we further ensure that T is not denormal.
15002 Similar logic is present in nonzero_address in rtlanal.h.
15004 If the return value is based on the assumption that signed overflow
15005 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15006 change *STRICT_OVERFLOW_P. */
15009 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15011 bool sub_strict_overflow_p;
15012 switch (TREE_CODE (t))
15015 return !integer_zerop (t);
15019 tree base = TREE_OPERAND (t, 0);
15020 if (!DECL_P (base))
15021 base = get_base_address (base);
15026 /* Weak declarations may link to NULL. Other things may also be NULL
15027 so protect with -fdelete-null-pointer-checks; but not variables
15028 allocated on the stack. */
15030 && (flag_delete_null_pointer_checks
15031 || (DECL_CONTEXT (base)
15032 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15033 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15034 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15036 /* Constants are never weak. */
15037 if (CONSTANT_CLASS_P (base))
15044 sub_strict_overflow_p = false;
15045 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15046 &sub_strict_overflow_p)
15047 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15048 &sub_strict_overflow_p))
15050 if (sub_strict_overflow_p)
15051 *strict_overflow_p = true;
15062 /* Return true when T is an address and is known to be nonzero.
15063 For floating point we further ensure that T is not denormal.
15064 Similar logic is present in nonzero_address in rtlanal.h.
15066 If the return value is based on the assumption that signed overflow
15067 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15068 change *STRICT_OVERFLOW_P. */
15071 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15073 tree type = TREE_TYPE (t);
15074 enum tree_code code;
15076 /* Doing something useful for floating point would need more work. */
15077 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15080 code = TREE_CODE (t);
15081 switch (TREE_CODE_CLASS (code))
15084 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15085 strict_overflow_p);
15087 case tcc_comparison:
15088 return tree_binary_nonzero_warnv_p (code, type,
15089 TREE_OPERAND (t, 0),
15090 TREE_OPERAND (t, 1),
15091 strict_overflow_p);
15093 case tcc_declaration:
15094 case tcc_reference:
15095 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15103 case TRUTH_NOT_EXPR:
15104 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15105 strict_overflow_p);
15107 case TRUTH_AND_EXPR:
15108 case TRUTH_OR_EXPR:
15109 case TRUTH_XOR_EXPR:
15110 return tree_binary_nonzero_warnv_p (code, type,
15111 TREE_OPERAND (t, 0),
15112 TREE_OPERAND (t, 1),
15113 strict_overflow_p);
15120 case WITH_SIZE_EXPR:
15122 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15124 case COMPOUND_EXPR:
15127 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15128 strict_overflow_p);
15131 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15132 strict_overflow_p);
15135 return alloca_call_p (t);
15143 /* Return true when T is an address and is known to be nonzero.
15144 Handle warnings about undefined signed overflow. */
15147 tree_expr_nonzero_p (tree t)
15149 bool ret, strict_overflow_p;
15151 strict_overflow_p = false;
15152 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15153 if (strict_overflow_p)
15154 fold_overflow_warning (("assuming signed overflow does not occur when "
15155 "determining that expression is always "
15157 WARN_STRICT_OVERFLOW_MISC);
15161 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15162 attempt to fold the expression to a constant without modifying TYPE,
15165 If the expression could be simplified to a constant, then return
15166 the constant. If the expression would not be simplified to a
15167 constant, then return NULL_TREE. */
15170 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15172 tree tem = fold_binary (code, type, op0, op1);
15173 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15176 /* Given the components of a unary expression CODE, TYPE and OP0,
15177 attempt to fold the expression to a constant without modifying
15180 If the expression could be simplified to a constant, then return
15181 the constant. If the expression would not be simplified to a
15182 constant, then return NULL_TREE. */
15185 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15187 tree tem = fold_unary (code, type, op0);
15188 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15191 /* If EXP represents referencing an element in a constant string
15192 (either via pointer arithmetic or array indexing), return the
15193 tree representing the value accessed, otherwise return NULL. */
15196 fold_read_from_constant_string (tree exp)
15198 if ((TREE_CODE (exp) == INDIRECT_REF
15199 || TREE_CODE (exp) == ARRAY_REF)
15200 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15202 tree exp1 = TREE_OPERAND (exp, 0);
15205 location_t loc = EXPR_LOCATION (exp);
15207 if (TREE_CODE (exp) == INDIRECT_REF)
15208 string = string_constant (exp1, &index);
15211 tree low_bound = array_ref_low_bound (exp);
15212 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15214 /* Optimize the special-case of a zero lower bound.
15216 We convert the low_bound to sizetype to avoid some problems
15217 with constant folding. (E.g. suppose the lower bound is 1,
15218 and its mode is QI. Without the conversion,l (ARRAY
15219 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15220 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15221 if (! integer_zerop (low_bound))
15222 index = size_diffop_loc (loc, index,
15223 fold_convert_loc (loc, sizetype, low_bound));
15229 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15230 && TREE_CODE (string) == STRING_CST
15231 && TREE_CODE (index) == INTEGER_CST
15232 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15233 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15235 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15236 return build_int_cst_type (TREE_TYPE (exp),
15237 (TREE_STRING_POINTER (string)
15238 [TREE_INT_CST_LOW (index)]));
15243 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15244 an integer constant, real, or fixed-point constant.
15246 TYPE is the type of the result. */
15249 fold_negate_const (tree arg0, tree type)
15251 tree t = NULL_TREE;
15253 switch (TREE_CODE (arg0))
15257 double_int val = tree_to_double_int (arg0);
15258 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15260 t = force_fit_type_double (type, val, 1,
15261 (overflow | TREE_OVERFLOW (arg0))
15262 && !TYPE_UNSIGNED (type));
15267 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15272 FIXED_VALUE_TYPE f;
15273 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15274 &(TREE_FIXED_CST (arg0)), NULL,
15275 TYPE_SATURATING (type));
15276 t = build_fixed (type, f);
15277 /* Propagate overflow flags. */
15278 if (overflow_p | TREE_OVERFLOW (arg0))
15279 TREE_OVERFLOW (t) = 1;
15284 gcc_unreachable ();
15290 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15291 an integer constant or real constant.
15293 TYPE is the type of the result. */
15296 fold_abs_const (tree arg0, tree type)
15298 tree t = NULL_TREE;
15300 switch (TREE_CODE (arg0))
15304 double_int val = tree_to_double_int (arg0);
15306 /* If the value is unsigned or non-negative, then the absolute value
15307 is the same as the ordinary value. */
15308 if (TYPE_UNSIGNED (type)
15309 || !double_int_negative_p (val))
15312 /* If the value is negative, then the absolute value is
15318 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15319 t = force_fit_type_double (type, val, -1,
15320 overflow | TREE_OVERFLOW (arg0));
15326 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15327 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15333 gcc_unreachable ();
15339 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15340 constant. TYPE is the type of the result. */
15343 fold_not_const (const_tree arg0, tree type)
15347 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15349 val = double_int_not (tree_to_double_int (arg0));
15350 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15353 /* Given CODE, a relational operator, the target type, TYPE and two
15354 constant operands OP0 and OP1, return the result of the
15355 relational operation. If the result is not a compile time
15356 constant, then return NULL_TREE. */
15359 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15361 int result, invert;
15363 /* From here on, the only cases we handle are when the result is
15364 known to be a constant. */
15366 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15368 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15369 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15371 /* Handle the cases where either operand is a NaN. */
15372 if (real_isnan (c0) || real_isnan (c1))
15382 case UNORDERED_EXPR:
15396 if (flag_trapping_math)
15402 gcc_unreachable ();
15405 return constant_boolean_node (result, type);
15408 return constant_boolean_node (real_compare (code, c0, c1), type);
15411 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15413 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15414 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15415 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15418 /* Handle equality/inequality of complex constants. */
15419 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15421 tree rcond = fold_relational_const (code, type,
15422 TREE_REALPART (op0),
15423 TREE_REALPART (op1));
15424 tree icond = fold_relational_const (code, type,
15425 TREE_IMAGPART (op0),
15426 TREE_IMAGPART (op1));
15427 if (code == EQ_EXPR)
15428 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15429 else if (code == NE_EXPR)
15430 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15435 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15437 To compute GT, swap the arguments and do LT.
15438 To compute GE, do LT and invert the result.
15439 To compute LE, swap the arguments, do LT and invert the result.
15440 To compute NE, do EQ and invert the result.
15442 Therefore, the code below must handle only EQ and LT. */
15444 if (code == LE_EXPR || code == GT_EXPR)
15449 code = swap_tree_comparison (code);
15452 /* Note that it is safe to invert for real values here because we
15453 have already handled the one case that it matters. */
15456 if (code == NE_EXPR || code == GE_EXPR)
15459 code = invert_tree_comparison (code, false);
15462 /* Compute a result for LT or EQ if args permit;
15463 Otherwise return T. */
15464 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15466 if (code == EQ_EXPR)
15467 result = tree_int_cst_equal (op0, op1);
15468 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15469 result = INT_CST_LT_UNSIGNED (op0, op1);
15471 result = INT_CST_LT (op0, op1);
15478 return constant_boolean_node (result, type);
15481 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15482 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15486 fold_build_cleanup_point_expr (tree type, tree expr)
15488 /* If the expression does not have side effects then we don't have to wrap
15489 it with a cleanup point expression. */
15490 if (!TREE_SIDE_EFFECTS (expr))
15493 /* If the expression is a return, check to see if the expression inside the
15494 return has no side effects or the right hand side of the modify expression
15495 inside the return. If either don't have side effects set we don't need to
15496 wrap the expression in a cleanup point expression. Note we don't check the
15497 left hand side of the modify because it should always be a return decl. */
15498 if (TREE_CODE (expr) == RETURN_EXPR)
15500 tree op = TREE_OPERAND (expr, 0);
15501 if (!op || !TREE_SIDE_EFFECTS (op))
15503 op = TREE_OPERAND (op, 1);
15504 if (!TREE_SIDE_EFFECTS (op))
15508 return build1 (CLEANUP_POINT_EXPR, type, expr);
15511 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15512 of an indirection through OP0, or NULL_TREE if no simplification is
15516 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15522 subtype = TREE_TYPE (sub);
15523 if (!POINTER_TYPE_P (subtype))
15526 if (TREE_CODE (sub) == ADDR_EXPR)
15528 tree op = TREE_OPERAND (sub, 0);
15529 tree optype = TREE_TYPE (op);
15530 /* *&CONST_DECL -> to the value of the const decl. */
15531 if (TREE_CODE (op) == CONST_DECL)
15532 return DECL_INITIAL (op);
15533 /* *&p => p; make sure to handle *&"str"[cst] here. */
15534 if (type == optype)
15536 tree fop = fold_read_from_constant_string (op);
15542 /* *(foo *)&fooarray => fooarray[0] */
15543 else if (TREE_CODE (optype) == ARRAY_TYPE
15544 && type == TREE_TYPE (optype)
15545 && (!in_gimple_form
15546 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15548 tree type_domain = TYPE_DOMAIN (optype);
15549 tree min_val = size_zero_node;
15550 if (type_domain && TYPE_MIN_VALUE (type_domain))
15551 min_val = TYPE_MIN_VALUE (type_domain);
15553 && TREE_CODE (min_val) != INTEGER_CST)
15555 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15556 NULL_TREE, NULL_TREE);
15558 /* *(foo *)&complexfoo => __real__ complexfoo */
15559 else if (TREE_CODE (optype) == COMPLEX_TYPE
15560 && type == TREE_TYPE (optype))
15561 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15562 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15563 else if (TREE_CODE (optype) == VECTOR_TYPE
15564 && type == TREE_TYPE (optype))
15566 tree part_width = TYPE_SIZE (type);
15567 tree index = bitsize_int (0);
15568 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15572 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15573 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15575 tree op00 = TREE_OPERAND (sub, 0);
15576 tree op01 = TREE_OPERAND (sub, 1);
15579 if (TREE_CODE (op00) == ADDR_EXPR)
15582 op00 = TREE_OPERAND (op00, 0);
15583 op00type = TREE_TYPE (op00);
15585 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15586 if (TREE_CODE (op00type) == VECTOR_TYPE
15587 && type == TREE_TYPE (op00type))
15589 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15590 tree part_width = TYPE_SIZE (type);
15591 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15592 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15593 tree index = bitsize_int (indexi);
15595 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
15596 return fold_build3_loc (loc,
15597 BIT_FIELD_REF, type, op00,
15598 part_width, index);
15601 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15602 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15603 && type == TREE_TYPE (op00type))
15605 tree size = TYPE_SIZE_UNIT (type);
15606 if (tree_int_cst_equal (size, op01))
15607 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15609 /* ((foo *)&fooarray)[1] => fooarray[1] */
15610 else if (TREE_CODE (op00type) == ARRAY_TYPE
15611 && type == TREE_TYPE (op00type))
15613 tree type_domain = TYPE_DOMAIN (op00type);
15614 tree min_val = size_zero_node;
15615 if (type_domain && TYPE_MIN_VALUE (type_domain))
15616 min_val = TYPE_MIN_VALUE (type_domain);
15617 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15618 TYPE_SIZE_UNIT (type));
15619 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15620 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15621 NULL_TREE, NULL_TREE);
15626 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15627 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15628 && type == TREE_TYPE (TREE_TYPE (subtype))
15629 && (!in_gimple_form
15630 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15633 tree min_val = size_zero_node;
15634 sub = build_fold_indirect_ref_loc (loc, sub);
15635 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15636 if (type_domain && TYPE_MIN_VALUE (type_domain))
15637 min_val = TYPE_MIN_VALUE (type_domain);
15639 && TREE_CODE (min_val) != INTEGER_CST)
15641 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15648 /* Builds an expression for an indirection through T, simplifying some
15652 build_fold_indirect_ref_loc (location_t loc, tree t)
15654 tree type = TREE_TYPE (TREE_TYPE (t));
15655 tree sub = fold_indirect_ref_1 (loc, type, t);
15660 return build1_loc (loc, INDIRECT_REF, type, t);
15663 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15666 fold_indirect_ref_loc (location_t loc, tree t)
15668 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15676 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15677 whose result is ignored. The type of the returned tree need not be
15678 the same as the original expression. */
15681 fold_ignored_result (tree t)
15683 if (!TREE_SIDE_EFFECTS (t))
15684 return integer_zero_node;
15687 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15690 t = TREE_OPERAND (t, 0);
15694 case tcc_comparison:
15695 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15696 t = TREE_OPERAND (t, 0);
15697 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15698 t = TREE_OPERAND (t, 1);
15703 case tcc_expression:
15704 switch (TREE_CODE (t))
15706 case COMPOUND_EXPR:
15707 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15709 t = TREE_OPERAND (t, 0);
15713 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15714 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15716 t = TREE_OPERAND (t, 0);
15729 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15730 This can only be applied to objects of a sizetype. */
15733 round_up_loc (location_t loc, tree value, int divisor)
15735 tree div = NULL_TREE;
15737 gcc_assert (divisor > 0);
15741 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15742 have to do anything. Only do this when we are not given a const,
15743 because in that case, this check is more expensive than just
15745 if (TREE_CODE (value) != INTEGER_CST)
15747 div = build_int_cst (TREE_TYPE (value), divisor);
15749 if (multiple_of_p (TREE_TYPE (value), value, div))
15753 /* If divisor is a power of two, simplify this to bit manipulation. */
15754 if (divisor == (divisor & -divisor))
15756 if (TREE_CODE (value) == INTEGER_CST)
15758 double_int val = tree_to_double_int (value);
15761 if ((val.low & (divisor - 1)) == 0)
15764 overflow_p = TREE_OVERFLOW (value);
15765 val.low &= ~(divisor - 1);
15766 val.low += divisor;
15774 return force_fit_type_double (TREE_TYPE (value), val,
15781 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15782 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15783 t = build_int_cst (TREE_TYPE (value), -divisor);
15784 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15790 div = build_int_cst (TREE_TYPE (value), divisor);
15791 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15792 value = size_binop_loc (loc, MULT_EXPR, value, div);
15798 /* Likewise, but round down. */
15801 round_down_loc (location_t loc, tree value, int divisor)
15803 tree div = NULL_TREE;
15805 gcc_assert (divisor > 0);
15809 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15810 have to do anything. Only do this when we are not given a const,
15811 because in that case, this check is more expensive than just
15813 if (TREE_CODE (value) != INTEGER_CST)
15815 div = build_int_cst (TREE_TYPE (value), divisor);
15817 if (multiple_of_p (TREE_TYPE (value), value, div))
15821 /* If divisor is a power of two, simplify this to bit manipulation. */
15822 if (divisor == (divisor & -divisor))
15826 t = build_int_cst (TREE_TYPE (value), -divisor);
15827 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15832 div = build_int_cst (TREE_TYPE (value), divisor);
15833 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15834 value = size_binop_loc (loc, MULT_EXPR, value, div);
15840 /* Returns the pointer to the base of the object addressed by EXP and
15841 extracts the information about the offset of the access, storing it
15842 to PBITPOS and POFFSET. */
15845 split_address_to_core_and_offset (tree exp,
15846 HOST_WIDE_INT *pbitpos, tree *poffset)
15849 enum machine_mode mode;
15850 int unsignedp, volatilep;
15851 HOST_WIDE_INT bitsize;
15852 location_t loc = EXPR_LOCATION (exp);
15854 if (TREE_CODE (exp) == ADDR_EXPR)
15856 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15857 poffset, &mode, &unsignedp, &volatilep,
15859 core = build_fold_addr_expr_loc (loc, core);
15865 *poffset = NULL_TREE;
15871 /* Returns true if addresses of E1 and E2 differ by a constant, false
15872 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15875 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15878 HOST_WIDE_INT bitpos1, bitpos2;
15879 tree toffset1, toffset2, tdiff, type;
15881 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15882 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15884 if (bitpos1 % BITS_PER_UNIT != 0
15885 || bitpos2 % BITS_PER_UNIT != 0
15886 || !operand_equal_p (core1, core2, 0))
15889 if (toffset1 && toffset2)
15891 type = TREE_TYPE (toffset1);
15892 if (type != TREE_TYPE (toffset2))
15893 toffset2 = fold_convert (type, toffset2);
15895 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15896 if (!cst_and_fits_in_hwi (tdiff))
15899 *diff = int_cst_value (tdiff);
15901 else if (toffset1 || toffset2)
15903 /* If only one of the offsets is non-constant, the difference cannot
15910 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15914 /* Simplify the floating point expression EXP when the sign of the
15915 result is not significant. Return NULL_TREE if no simplification
15919 fold_strip_sign_ops (tree exp)
15922 location_t loc = EXPR_LOCATION (exp);
15924 switch (TREE_CODE (exp))
15928 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15929 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15933 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15935 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15936 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15937 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15938 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
15939 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15940 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15943 case COMPOUND_EXPR:
15944 arg0 = TREE_OPERAND (exp, 0);
15945 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15947 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15951 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15952 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15954 return fold_build3_loc (loc,
15955 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15956 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15957 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15962 const enum built_in_function fcode = builtin_mathfn_code (exp);
15965 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15966 /* Strip copysign function call, return the 1st argument. */
15967 arg0 = CALL_EXPR_ARG (exp, 0);
15968 arg1 = CALL_EXPR_ARG (exp, 1);
15969 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
15972 /* Strip sign ops from the argument of "odd" math functions. */
15973 if (negate_mathfn_p (fcode))
15975 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15977 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);