/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide and size_binop.
32 fold takes a tree as argument and returns a simplified tree.
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'.
41 Note: Since the folders get called on non-gimple code as well as
42 gimple code, we need to handle GIMPLE tuples as well as their
43 corresponding tree equivalents. */
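/* Illustrative sketch of the size_binop/size_int entry points: assuming N
   is a sizetype tree, a caller can fold "N * 4 + 8" with

     tree bytes = size_binop (PLUS_EXPR,
                              size_binop (MULT_EXPR, n, size_int (4)),
                              size_int (8));

   If N is an INTEGER_CST, the result is again an INTEGER_CST.  */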
47 #include "coretypes.h"
56 #include "diagnostic-core.h"
60 #include "langhooks.h"
63 #include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */

int folding_initializer = 0;
/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
72 enum comparison_code {
91 static bool negate_mathfn_p (enum built_in_function);
92 static bool negate_expr_p (tree);
93 static tree negate_expr (tree);
94 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
95 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
96 static tree const_binop (enum tree_code, tree, tree);
97 static enum comparison_code comparison_to_compcode (enum tree_code);
98 static enum tree_code compcode_to_comparison (enum comparison_code);
99 static int operand_equal_for_comparison_p (tree, tree, tree);
100 static int twoval_comparison_p (tree, tree *, tree *, int *);
101 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
102 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
103 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
104 static tree make_bit_field_ref (location_t, tree, tree,
105 HOST_WIDE_INT, HOST_WIDE_INT, int);
106 static tree optimize_bit_field_compare (location_t, enum tree_code,
108 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
110 enum machine_mode *, int *, int *,
112 static int all_ones_mask_p (const_tree, int);
113 static tree sign_bit_p (tree, const_tree);
114 static int simple_operand_p (const_tree);
115 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
116 static tree range_predecessor (tree);
117 static tree range_successor (tree);
118 extern tree make_range (tree, int *, tree *, tree *, bool *);
119 extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
121 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
122 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
123 static tree unextend (tree, int, int, tree);
124 static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
125 static tree optimize_minmax_comparison (location_t, enum tree_code,
127 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
128 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
129 static tree fold_binary_op_with_conditional_arg (location_t,
130 enum tree_code, tree,
133 static tree fold_mathfn_compare (location_t,
134 enum built_in_function, enum tree_code,
136 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
137 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
138 static bool reorder_operands_p (const_tree, const_tree);
139 static tree fold_negate_const (tree, tree);
140 static tree fold_not_const (const_tree, tree);
141 static tree fold_relational_const (enum tree_code, tree, tree, tree);
142 static tree fold_convert_const (enum tree_code, tree, tree);
144 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
145 Otherwise, return LOC. */
148 expr_location_or (tree t, location_t loc)
150 location_t tloc = EXPR_LOCATION (t);
151 return tloc != UNKNOWN_LOCATION ? tloc : loc;
/* Similar to protected_set_expr_location, but never modify x in place;
   if the location can and needs to be set, unshare it.  */
158 protected_set_expr_location_unshare (tree x, location_t loc)
160 if (CAN_HAVE_LOCATION_P (x)
161 && EXPR_LOCATION (x) != loc
162 && !(TREE_CODE (x) == SAVE_EXPR
163 || TREE_CODE (x) == TARGET_EXPR
164 || TREE_CODE (x) == BIND_EXPR))
167 SET_EXPR_LOCATION (x, loc);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
181 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
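/* For example, with illustrative 8-bit two's complement values
   a = 100, b = 100, sum = -56 (the wrapped result of 100 + 100):
   ~(a ^ b) has its sign bit set because a and b have the same sign,
   and (a ^ sum) has its sign bit set because a and sum differ in sign,
   so OVERFLOW_SUM_SIGN (a, b, sum) is nonzero.  In actual use the
   arguments are the high words of double_int values.  */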
183 /* If ARG2 divides ARG1 with zero remainder, carries out the division
184 of type CODE and returns the quotient.
185 Otherwise returns NULL_TREE. */
188 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
/* The sign of the division is taken from operand two; this does
   the correct thing for POINTER_PLUS_EXPR, where we want
   a signed division.  */
196 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
197 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
198 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
201 quo = double_int_divmod (tree_to_double_int (arg1),
202 tree_to_double_int (arg2),
205 if (double_int_zero_p (rem))
206 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
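/* Illustrative behaviour, assuming sizetype INTEGER_CST arguments:
   div_if_zero_remainder (EXACT_DIV_EXPR,
                          build_int_cst (sizetype, 12),
                          build_int_cst (sizetype, 4))
   yields the constant 3, while the same call with 13 instead of 12
   yields NULL_TREE because the remainder is nonzero.  */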
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;
222 /* If a warning about undefined overflow is deferred, this is the
223 warning. Note that this may cause us to turn two warnings into
224 one, but that is fine since it is sufficient to only give one
225 warning per expression. */
227 static const char* fold_deferred_overflow_warning;
229 /* If a warning about undefined overflow is deferred, this is the
230 level at which the warning should be emitted. */
232 static enum warn_strict_overflow_code fold_deferred_overflow_code;
234 /* Start deferring overflow warnings. We could use a stack here to
235 permit nested calls, but at present it is not necessary. */
238 fold_defer_overflow_warnings (void)
240 ++fold_deferring_overflow_warnings;
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */
253 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
258 gcc_assert (fold_deferring_overflow_warnings > 0);
259 --fold_deferring_overflow_warnings;
260 if (fold_deferring_overflow_warnings > 0)
262 if (fold_deferred_overflow_warning != NULL
264 && code < (int) fold_deferred_overflow_code)
265 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
269 warnmsg = fold_deferred_overflow_warning;
270 fold_deferred_overflow_warning = NULL;
272 if (!issue || warnmsg == NULL)
275 if (gimple_no_warning_p (stmt))
/* Use the smallest code level when deciding to issue the
   warning.  */
280 if (code == 0 || code > (int) fold_deferred_overflow_code)
281 code = fold_deferred_overflow_code;
283 if (!issue_strict_overflow_warning (code))
287 locus = input_location;
289 locus = gimple_location (stmt);
290 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */
297 fold_undefer_and_ignore_overflow_warnings (void)
299 fold_undefer_overflow_warnings (false, NULL, 0);
302 /* Whether we are deferring overflow warnings. */
305 fold_deferring_overflow_warnings_p (void)
307 return fold_deferring_overflow_warnings > 0;
310 /* This is called when we fold something based on the fact that signed
311 overflow is undefined. */
314 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
316 if (fold_deferring_overflow_warnings > 0)
318 if (fold_deferred_overflow_warning == NULL
319 || wc < fold_deferred_overflow_code)
321 fold_deferred_overflow_warning = gmsgid;
322 fold_deferred_overflow_code = wc;
325 else if (issue_strict_overflow_warning (wc))
326 warning (OPT_Wstrict_overflow, gmsgid);
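/* A hypothetical caller pattern for the deferral machinery above:

     fold_defer_overflow_warnings ();
     tree t = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (t != NULL_TREE, stmt,
                                     WARN_STRICT_OVERFLOW_MISC);

   i.e. any warning queued by fold_overflow_warning while folding is only
   emitted if the caller decides the result was actually used (ISSUE is
   true).  The names CODE, TYPE, OP0, OP1 and STMT here are illustrative.  */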
329 /* Return true if the built-in mathematical function specified by CODE
330 is odd, i.e. -f(x) == f(-x). */
333 negate_mathfn_p (enum built_in_function code)
337 CASE_FLT_FN (BUILT_IN_ASIN):
338 CASE_FLT_FN (BUILT_IN_ASINH):
339 CASE_FLT_FN (BUILT_IN_ATAN):
340 CASE_FLT_FN (BUILT_IN_ATANH):
341 CASE_FLT_FN (BUILT_IN_CASIN):
342 CASE_FLT_FN (BUILT_IN_CASINH):
343 CASE_FLT_FN (BUILT_IN_CATAN):
344 CASE_FLT_FN (BUILT_IN_CATANH):
345 CASE_FLT_FN (BUILT_IN_CBRT):
346 CASE_FLT_FN (BUILT_IN_CPROJ):
347 CASE_FLT_FN (BUILT_IN_CSIN):
348 CASE_FLT_FN (BUILT_IN_CSINH):
349 CASE_FLT_FN (BUILT_IN_CTAN):
350 CASE_FLT_FN (BUILT_IN_CTANH):
351 CASE_FLT_FN (BUILT_IN_ERF):
352 CASE_FLT_FN (BUILT_IN_LLROUND):
353 CASE_FLT_FN (BUILT_IN_LROUND):
354 CASE_FLT_FN (BUILT_IN_ROUND):
355 CASE_FLT_FN (BUILT_IN_SIN):
356 CASE_FLT_FN (BUILT_IN_SINH):
357 CASE_FLT_FN (BUILT_IN_TAN):
358 CASE_FLT_FN (BUILT_IN_TANH):
359 CASE_FLT_FN (BUILT_IN_TRUNC):
362 CASE_FLT_FN (BUILT_IN_LLRINT):
363 CASE_FLT_FN (BUILT_IN_LRINT):
364 CASE_FLT_FN (BUILT_IN_NEARBYINT):
365 CASE_FLT_FN (BUILT_IN_RINT):
366 return !flag_rounding_math;
/* Check whether we may negate an integer constant T without causing
   overflow.  */
378 may_negate_without_overflow_p (const_tree t)
380 unsigned HOST_WIDE_INT val;
384 gcc_assert (TREE_CODE (t) == INTEGER_CST);
386 type = TREE_TYPE (t);
387 if (TYPE_UNSIGNED (type))
390 prec = TYPE_PRECISION (type);
391 if (prec > HOST_BITS_PER_WIDE_INT)
393 if (TREE_INT_CST_LOW (t) != 0)
395 prec -= HOST_BITS_PER_WIDE_INT;
396 val = TREE_INT_CST_HIGH (t);
399 val = TREE_INT_CST_LOW (t);
400 if (prec < HOST_BITS_PER_WIDE_INT)
401 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
402 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
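/* For instance, for a 16-bit signed type the only value rejected is
   -32768: negating it would again yield -32768, i.e. overflow, so the
   function returns false exactly when T is the minimum value of its
   type (the widths here are illustrative).  */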
405 /* Determine whether an expression T can be cheaply negated using
406 the function negate_expr without introducing undefined overflow. */
409 negate_expr_p (tree t)
416 type = TREE_TYPE (t);
419 switch (TREE_CODE (t))
422 if (TYPE_OVERFLOW_WRAPS (type))
425 /* Check that -CST will not overflow type. */
426 return may_negate_without_overflow_p (t);
428 return (INTEGRAL_TYPE_P (type)
429 && TYPE_OVERFLOW_WRAPS (type));
436 /* We want to canonicalize to positive real constants. Pretend
437 that only negative ones can be easily negated. */
438 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
441 return negate_expr_p (TREE_REALPART (t))
442 && negate_expr_p (TREE_IMAGPART (t));
445 return negate_expr_p (TREE_OPERAND (t, 0))
446 && negate_expr_p (TREE_OPERAND (t, 1));
449 return negate_expr_p (TREE_OPERAND (t, 0));
452 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
453 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
455 /* -(A + B) -> (-B) - A. */
456 if (negate_expr_p (TREE_OPERAND (t, 1))
457 && reorder_operands_p (TREE_OPERAND (t, 0),
458 TREE_OPERAND (t, 1)))
460 /* -(A + B) -> (-A) - B. */
461 return negate_expr_p (TREE_OPERAND (t, 0));
464 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
465 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
466 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
467 && reorder_operands_p (TREE_OPERAND (t, 0),
468 TREE_OPERAND (t, 1));
471 if (TYPE_UNSIGNED (TREE_TYPE (t)))
477 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
478 return negate_expr_p (TREE_OPERAND (t, 1))
479 || negate_expr_p (TREE_OPERAND (t, 0));
/* In general we can't negate A / B, because if A is INT_MIN and
   B is 1, we may turn this into INT_MIN / -1 which is undefined
   and actually traps on some architectures.  But if overflow is
   undefined, we can negate, because - (INT_MIN / 1) is an
   overflow.  */
492 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
493 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
495 return negate_expr_p (TREE_OPERAND (t, 1))
496 || negate_expr_p (TREE_OPERAND (t, 0));
499 /* Negate -((double)float) as (double)(-float). */
500 if (TREE_CODE (type) == REAL_TYPE)
502 tree tem = strip_float_extensions (t);
504 return negate_expr_p (tem);
509 /* Negate -f(x) as f(-x). */
510 if (negate_mathfn_p (builtin_mathfn_code (t)))
511 return negate_expr_p (CALL_EXPR_ARG (t, 0));
515 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
516 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
518 tree op1 = TREE_OPERAND (t, 1);
519 if (TREE_INT_CST_HIGH (op1) == 0
520 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
521 == TREE_INT_CST_LOW (op1))
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */
538 fold_negate_expr (location_t loc, tree t)
540 tree type = TREE_TYPE (t);
543 switch (TREE_CODE (t))
545 /* Convert - (~A) to A + 1. */
547 if (INTEGRAL_TYPE_P (type))
548 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
549 build_int_cst (type, 1));
553 tem = fold_negate_const (t, type);
554 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
555 || !TYPE_OVERFLOW_TRAPS (type))
560 tem = fold_negate_const (t, type);
561 /* Two's complement FP formats, such as c4x, may overflow. */
562 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
567 tem = fold_negate_const (t, type);
572 tree rpart = negate_expr (TREE_REALPART (t));
573 tree ipart = negate_expr (TREE_IMAGPART (t));
575 if ((TREE_CODE (rpart) == REAL_CST
576 && TREE_CODE (ipart) == REAL_CST)
577 || (TREE_CODE (rpart) == INTEGER_CST
578 && TREE_CODE (ipart) == INTEGER_CST))
579 return build_complex (type, rpart, ipart);
584 if (negate_expr_p (t))
585 return fold_build2_loc (loc, COMPLEX_EXPR, type,
586 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
587 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
591 if (negate_expr_p (t))
592 return fold_build1_loc (loc, CONJ_EXPR, type,
593 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
597 return TREE_OPERAND (t, 0);
600 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
601 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
603 /* -(A + B) -> (-B) - A. */
604 if (negate_expr_p (TREE_OPERAND (t, 1))
605 && reorder_operands_p (TREE_OPERAND (t, 0),
606 TREE_OPERAND (t, 1)))
608 tem = negate_expr (TREE_OPERAND (t, 1));
609 return fold_build2_loc (loc, MINUS_EXPR, type,
610 tem, TREE_OPERAND (t, 0));
613 /* -(A + B) -> (-A) - B. */
614 if (negate_expr_p (TREE_OPERAND (t, 0)))
616 tem = negate_expr (TREE_OPERAND (t, 0));
617 return fold_build2_loc (loc, MINUS_EXPR, type,
618 tem, TREE_OPERAND (t, 1));
624 /* - (A - B) -> B - A */
625 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
626 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
627 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
628 return fold_build2_loc (loc, MINUS_EXPR, type,
629 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
633 if (TYPE_UNSIGNED (type))
639 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
641 tem = TREE_OPERAND (t, 1);
642 if (negate_expr_p (tem))
643 return fold_build2_loc (loc, TREE_CODE (t), type,
644 TREE_OPERAND (t, 0), negate_expr (tem));
645 tem = TREE_OPERAND (t, 0);
646 if (negate_expr_p (tem))
647 return fold_build2_loc (loc, TREE_CODE (t), type,
648 negate_expr (tem), TREE_OPERAND (t, 1));
/* In general we can't negate A / B, because if A is INT_MIN and
   B is 1, we may turn this into INT_MIN / -1 which is undefined
   and actually traps on some architectures.  But if overflow is
   undefined, we can negate, because - (INT_MIN / 1) is an
   overflow.  */
662 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
664 const char * const warnmsg = G_("assuming signed overflow does not "
665 "occur when negating a division");
666 tem = TREE_OPERAND (t, 1);
667 if (negate_expr_p (tem))
669 if (INTEGRAL_TYPE_P (type)
670 && (TREE_CODE (tem) != INTEGER_CST
671 || integer_onep (tem)))
672 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
673 return fold_build2_loc (loc, TREE_CODE (t), type,
674 TREE_OPERAND (t, 0), negate_expr (tem));
676 tem = TREE_OPERAND (t, 0);
677 if (negate_expr_p (tem))
679 if (INTEGRAL_TYPE_P (type)
680 && (TREE_CODE (tem) != INTEGER_CST
681 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
682 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
683 return fold_build2_loc (loc, TREE_CODE (t), type,
684 negate_expr (tem), TREE_OPERAND (t, 1));
690 /* Convert -((double)float) into (double)(-float). */
691 if (TREE_CODE (type) == REAL_TYPE)
693 tem = strip_float_extensions (t);
694 if (tem != t && negate_expr_p (tem))
695 return fold_convert_loc (loc, type, negate_expr (tem));
700 /* Negate -f(x) as f(-x). */
701 if (negate_mathfn_p (builtin_mathfn_code (t))
702 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
706 fndecl = get_callee_fndecl (t);
707 arg = negate_expr (CALL_EXPR_ARG (t, 0));
708 return build_call_expr_loc (loc, fndecl, 1, arg);
713 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
714 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
716 tree op1 = TREE_OPERAND (t, 1);
717 if (TREE_INT_CST_HIGH (op1) == 0
718 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
719 == TREE_INT_CST_LOW (op1))
721 tree ntype = TYPE_UNSIGNED (type)
722 ? signed_type_for (type)
723 : unsigned_type_for (type);
724 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
725 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
726 return fold_convert_loc (loc, type, temp);
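/* As a concrete illustration of the transformation above: for a 32-bit
   signed X, (X >> 31) is 0 or -1, so -(X >> 31) is 0 or 1, which is
   exactly ((unsigned) X) >> 31; the same holds in the reverse signedness
   direction handled via NTYPE above.  */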
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */
751 loc = EXPR_LOCATION (t);
752 type = TREE_TYPE (t);
755 tem = fold_negate_expr (loc, t);
757 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
758 return fold_convert_loc (loc, type, tem);
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except if it is a
   literal, for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal,
   for which we use *MINUS_LITP instead.
776 If IN is itself a literal or constant, return it as appropriate.
778 Note that we do not guarantee that any of the three values will be the
779 same type as IN, but they will have the same signedness and mode. */
782 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
783 tree *minus_litp, int negate_p)
791 /* Strip any conversions that don't change the machine mode or signedness. */
792 STRIP_SIGN_NOPS (in);
794 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
795 || TREE_CODE (in) == FIXED_CST)
797 else if (TREE_CODE (in) == code
798 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
799 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
800 /* We can associate addition and subtraction together (even
801 though the C standard doesn't say so) for integers because
802 the value is not affected. For reals, the value might be
803 affected, so we can't. */
804 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
805 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
807 tree op0 = TREE_OPERAND (in, 0);
808 tree op1 = TREE_OPERAND (in, 1);
809 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
810 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
812 /* First see if either of the operands is a literal, then a constant. */
813 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
814 || TREE_CODE (op0) == FIXED_CST)
815 *litp = op0, op0 = 0;
816 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
817 || TREE_CODE (op1) == FIXED_CST)
818 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
820 if (op0 != 0 && TREE_CONSTANT (op0))
821 *conp = op0, op0 = 0;
822 else if (op1 != 0 && TREE_CONSTANT (op1))
823 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
825 /* If we haven't dealt with either operand, this is not a case we can
826 decompose. Otherwise, VAR is either of the ones remaining, if any. */
827 if (op0 != 0 && op1 != 0)
832 var = op1, neg_var_p = neg1_p;
834 /* Now do any needed negations. */
836 *minus_litp = *litp, *litp = 0;
838 *conp = negate_expr (*conp);
840 var = negate_expr (var);
842 else if (TREE_CONSTANT (in))
850 *minus_litp = *litp, *litp = 0;
851 else if (*minus_litp)
852 *litp = *minus_litp, *minus_litp = 0;
853 *conp = negate_expr (*conp);
854 var = negate_expr (var);
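/* Illustrative decomposition, with X a plain variable of integer type:
   splitting "X + 5" with CODE == PLUS_EXPR stores 5 in *LITP, leaves
   *CONP null and returns X as the variable part; splitting "X - 5"
   stores 5 in *MINUS_LITP instead, because the literal was subtracted.  */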
860 /* Re-associate trees split by the above function. T1 and T2 are
861 either expressions to associate or null. Return the new
862 expression, if any. LOC is the location of the new expression. If
863 we build an operation, do it in TYPE and with CODE. */
866 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
873 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
874 try to fold this since we will have infinite recursion. But do
875 deal with any NEGATE_EXPRs. */
876 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
877 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
879 if (code == PLUS_EXPR)
881 if (TREE_CODE (t1) == NEGATE_EXPR)
882 return build2_loc (loc, MINUS_EXPR, type,
883 fold_convert_loc (loc, type, t2),
884 fold_convert_loc (loc, type,
885 TREE_OPERAND (t1, 0)));
886 else if (TREE_CODE (t2) == NEGATE_EXPR)
887 return build2_loc (loc, MINUS_EXPR, type,
888 fold_convert_loc (loc, type, t1),
889 fold_convert_loc (loc, type,
890 TREE_OPERAND (t2, 0)));
891 else if (integer_zerop (t2))
892 return fold_convert_loc (loc, type, t1);
894 else if (code == MINUS_EXPR)
896 if (integer_zerop (t2))
897 return fold_convert_loc (loc, type, t1);
900 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
901 fold_convert_loc (loc, type, t2));
904 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
905 fold_convert_loc (loc, type, t2));
908 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
909 for use in int_const_binop, size_binop and size_diffop. */
912 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
914 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
916 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
931 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
932 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
933 && TYPE_MODE (type1) == TYPE_MODE (type2);
937 /* Combine two integer constants ARG1 and ARG2 under operation CODE
938 to produce a new constant. Return NULL_TREE if we don't know how
939 to evaluate CODE at compile-time.
941 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
944 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
946 double_int op1, op2, res, tmp;
948 tree type = TREE_TYPE (arg1);
949 bool uns = TYPE_UNSIGNED (type);
951 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
952 bool overflow = false;
954 op1 = tree_to_double_int (arg1);
955 op2 = tree_to_double_int (arg2);
960 res = double_int_ior (op1, op2);
964 res = double_int_xor (op1, op2);
968 res = double_int_and (op1, op2);
972 res = double_int_rshift (op1, double_int_to_shwi (op2),
973 TYPE_PRECISION (type), !uns);
977 /* It's unclear from the C standard whether shifts can overflow.
978 The following code ignores overflow; perhaps a C standard
979 interpretation ruling is needed. */
980 res = double_int_lshift (op1, double_int_to_shwi (op2),
981 TYPE_PRECISION (type), !uns);
985 res = double_int_rrotate (op1, double_int_to_shwi (op2),
986 TYPE_PRECISION (type));
990 res = double_int_lrotate (op1, double_int_to_shwi (op2),
991 TYPE_PRECISION (type));
995 overflow = add_double (op1.low, op1.high, op2.low, op2.high,
996 &res.low, &res.high);
1000 neg_double (op2.low, op2.high, &res.low, &res.high);
1001 add_double (op1.low, op1.high, res.low, res.high,
1002 &res.low, &res.high);
1003 overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
1007 overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
1008 &res.low, &res.high);
1011 case TRUNC_DIV_EXPR:
1012 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1013 case EXACT_DIV_EXPR:
1014 /* This is a shortcut for a common special case. */
1015 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1016 && !TREE_OVERFLOW (arg1)
1017 && !TREE_OVERFLOW (arg2)
1018 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1020 if (code == CEIL_DIV_EXPR)
1021 op1.low += op2.low - 1;
1023 res.low = op1.low / op2.low, res.high = 0;
1027 /* ... fall through ... */
1029 case ROUND_DIV_EXPR:
1030 if (double_int_zero_p (op2))
1032 if (double_int_one_p (op2))
1037 if (double_int_equal_p (op1, op2)
1038 && ! double_int_zero_p (op1))
1040 res = double_int_one;
1043 overflow = div_and_round_double (code, uns,
1044 op1.low, op1.high, op2.low, op2.high,
1045 &res.low, &res.high,
1046 &tmp.low, &tmp.high);
1049 case TRUNC_MOD_EXPR:
1050 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1051 /* This is a shortcut for a common special case. */
1052 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1053 && !TREE_OVERFLOW (arg1)
1054 && !TREE_OVERFLOW (arg2)
1055 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1057 if (code == CEIL_MOD_EXPR)
1058 op1.low += op2.low - 1;
1059 res.low = op1.low % op2.low, res.high = 0;
1063 /* ... fall through ... */
1065 case ROUND_MOD_EXPR:
1066 if (double_int_zero_p (op2))
1068 overflow = div_and_round_double (code, uns,
1069 op1.low, op1.high, op2.low, op2.high,
1070 &tmp.low, &tmp.high,
1071 &res.low, &res.high);
1075 res = double_int_min (op1, op2, uns);
1079 res = double_int_max (op1, op2, uns);
1088 t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);
1090 /* Propagate overflow flags ourselves. */
1091 if (((!uns || is_sizetype) && overflow)
1092 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1095 TREE_OVERFLOW (t) = 1;
1099 t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
1100 ((!uns || is_sizetype) && overflow)
1101 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
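/* Illustrative uses, assuming 32-bit signed int INTEGER_CST arguments
   built with build_int_cst: int_const_binop (PLUS_EXPR, 2, 3, 0) yields
   the constant 5, while int_const_binop (PLUS_EXPR, INT_MAX, 1, 0) yields
   the wrapped value INT_MIN with TREE_OVERFLOW set on the result, since
   the operands are signed.  */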
1106 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1107 constant. We assume ARG1 and ARG2 have the same data type, or at least
1108 are the same kind of constant and the same machine mode. Return zero if
1109 combining the constants is not allowed in the current operating mode. */
1112 const_binop (enum tree_code code, tree arg1, tree arg2)
1114 /* Sanity check for the recursive cases. */
1121 if (TREE_CODE (arg1) == INTEGER_CST)
1122 return int_const_binop (code, arg1, arg2, 0);
1124 if (TREE_CODE (arg1) == REAL_CST)
1126 enum machine_mode mode;
1129 REAL_VALUE_TYPE value;
1130 REAL_VALUE_TYPE result;
1134 /* The following codes are handled by real_arithmetic. */
1149 d1 = TREE_REAL_CST (arg1);
1150 d2 = TREE_REAL_CST (arg2);
1152 type = TREE_TYPE (arg1);
1153 mode = TYPE_MODE (type);
1155 /* Don't perform operation if we honor signaling NaNs and
1156 either operand is a NaN. */
1157 if (HONOR_SNANS (mode)
1158 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1161 /* Don't perform operation if it would raise a division
1162 by zero exception. */
1163 if (code == RDIV_EXPR
1164 && REAL_VALUES_EQUAL (d2, dconst0)
1165 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1168 /* If either operand is a NaN, just return it. Otherwise, set up
1169 for floating-point trap; we return an overflow. */
1170 if (REAL_VALUE_ISNAN (d1))
1172 else if (REAL_VALUE_ISNAN (d2))
1175 inexact = real_arithmetic (&value, code, &d1, &d2);
1176 real_convert (&result, mode, &value);
1178 /* Don't constant fold this floating point operation if
1179 the result has overflowed and flag_trapping_math. */
1180 if (flag_trapping_math
1181 && MODE_HAS_INFINITIES (mode)
1182 && REAL_VALUE_ISINF (result)
1183 && !REAL_VALUE_ISINF (d1)
1184 && !REAL_VALUE_ISINF (d2))
/* Don't constant fold this floating point operation if the
   result may depend upon the run-time rounding mode and
   flag_rounding_math is set, or if GCC's software emulation
   is unable to accurately represent the result.  */
1191 if ((flag_rounding_math
1192 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1193 && (inexact || !real_identical (&result, &value)))
1196 t = build_real (type, result);
1198 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1202 if (TREE_CODE (arg1) == FIXED_CST)
1204 FIXED_VALUE_TYPE f1;
1205 FIXED_VALUE_TYPE f2;
1206 FIXED_VALUE_TYPE result;
1211 /* The following codes are handled by fixed_arithmetic. */
1217 case TRUNC_DIV_EXPR:
1218 f2 = TREE_FIXED_CST (arg2);
1223 f2.data.high = TREE_INT_CST_HIGH (arg2);
1224 f2.data.low = TREE_INT_CST_LOW (arg2);
1232 f1 = TREE_FIXED_CST (arg1);
1233 type = TREE_TYPE (arg1);
1234 sat_p = TYPE_SATURATING (type);
1235 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1236 t = build_fixed (type, result);
1237 /* Propagate overflow flags. */
1238 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1239 TREE_OVERFLOW (t) = 1;
1243 if (TREE_CODE (arg1) == COMPLEX_CST)
1245 tree type = TREE_TYPE (arg1);
1246 tree r1 = TREE_REALPART (arg1);
1247 tree i1 = TREE_IMAGPART (arg1);
1248 tree r2 = TREE_REALPART (arg2);
1249 tree i2 = TREE_IMAGPART (arg2);
1256 real = const_binop (code, r1, r2);
1257 imag = const_binop (code, i1, i2);
1261 if (COMPLEX_FLOAT_TYPE_P (type))
1262 return do_mpc_arg2 (arg1, arg2, type,
1263 /* do_nonfinite= */ folding_initializer,
1266 real = const_binop (MINUS_EXPR,
1267 const_binop (MULT_EXPR, r1, r2),
1268 const_binop (MULT_EXPR, i1, i2));
1269 imag = const_binop (PLUS_EXPR,
1270 const_binop (MULT_EXPR, r1, i2),
1271 const_binop (MULT_EXPR, i1, r2));
1275 if (COMPLEX_FLOAT_TYPE_P (type))
1276 return do_mpc_arg2 (arg1, arg2, type,
1277 /* do_nonfinite= */ folding_initializer,
1280 case TRUNC_DIV_EXPR:
1282 case FLOOR_DIV_EXPR:
1283 case ROUND_DIV_EXPR:
1284 if (flag_complex_method == 0)
1286 /* Keep this algorithm in sync with
1287 tree-complex.c:expand_complex_div_straight().
1289 Expand complex division to scalars, straightforward algorithm.
1290 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1294 = const_binop (PLUS_EXPR,
1295 const_binop (MULT_EXPR, r2, r2),
1296 const_binop (MULT_EXPR, i2, i2));
1298 = const_binop (PLUS_EXPR,
1299 const_binop (MULT_EXPR, r1, r2),
1300 const_binop (MULT_EXPR, i1, i2));
1302 = const_binop (MINUS_EXPR,
1303 const_binop (MULT_EXPR, i1, r2),
1304 const_binop (MULT_EXPR, r1, i2));
1306 real = const_binop (code, t1, magsquared);
1307 imag = const_binop (code, t2, magsquared);
1311 /* Keep this algorithm in sync with
1312 tree-complex.c:expand_complex_div_wide().
1314 Expand complex division to scalars, modified algorithm to minimize
1315 overflow with wide input ranges. */
1316 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1317 fold_abs_const (r2, TREE_TYPE (type)),
1318 fold_abs_const (i2, TREE_TYPE (type)));
1320 if (integer_nonzerop (compare))
1322 /* In the TRUE branch, we compute
1324 div = (br * ratio) + bi;
1325 tr = (ar * ratio) + ai;
1326 ti = (ai * ratio) - ar;
1329 tree ratio = const_binop (code, r2, i2);
1330 tree div = const_binop (PLUS_EXPR, i2,
1331 const_binop (MULT_EXPR, r2, ratio));
1332 real = const_binop (MULT_EXPR, r1, ratio);
1333 real = const_binop (PLUS_EXPR, real, i1);
1334 real = const_binop (code, real, div);
1336 imag = const_binop (MULT_EXPR, i1, ratio);
1337 imag = const_binop (MINUS_EXPR, imag, r1);
1338 imag = const_binop (code, imag, div);
1342 /* In the FALSE branch, we compute
1344 divisor = (d * ratio) + c;
1345 tr = (b * ratio) + a;
1346 ti = b - (a * ratio);
1349 tree ratio = const_binop (code, i2, r2);
1350 tree div = const_binop (PLUS_EXPR, r2,
1351 const_binop (MULT_EXPR, i2, ratio));
1353 real = const_binop (MULT_EXPR, i1, ratio);
1354 real = const_binop (PLUS_EXPR, real, r1);
1355 real = const_binop (code, real, div);
1357 imag = const_binop (MULT_EXPR, r1, ratio);
1358 imag = const_binop (MINUS_EXPR, i1, imag);
1359 imag = const_binop (code, imag, div);
1369 return build_complex (type, real, imag);
if (TREE_CODE (arg1) == VECTOR_CST)
tree type = TREE_TYPE (arg1);
int count = TYPE_VECTOR_SUBPARTS (type), i;
tree elements1, elements2, list = NULL_TREE;
if (TREE_CODE (arg2) != VECTOR_CST)
elements1 = TREE_VECTOR_CST_ELTS (arg1);
elements2 = TREE_VECTOR_CST_ELTS (arg2);
for (i = 0; i < count; i++)
tree elem1, elem2, elem;
/* The trailing elements can be empty and should be treated as 0.  */
elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
elem1 = TREE_VALUE (elements1);
elements1 = TREE_CHAIN (elements1);
elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
elem2 = TREE_VALUE (elements2);
elements2 = TREE_CHAIN (elements2);
elem = const_binop (code, elem1, elem2);
/* It is possible that const_binop cannot handle the given
   code and returns NULL_TREE.  */
if (elem == NULL_TREE)
list = tree_cons (NULL_TREE, elem, list);
return build_vector (type, nreverse (list));
1419 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1420 indicates which particular sizetype to create. */
1423 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1425 return build_int_cst (sizetype_tab[(int) kind], number);
1428 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1429 is a tree code. The type of the result is taken from the operands.
1430 Both must be equivalent integer types, ala int_binop_types_match_p.
1431 If the operands are constant, so is the result. */
1434 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1436 tree type = TREE_TYPE (arg0);
1438 if (arg0 == error_mark_node || arg1 == error_mark_node)
1439 return error_mark_node;
1441 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1444 /* Handle the special case of two integer constants faster. */
1445 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1447 /* And some specific cases even faster than that. */
1448 if (code == PLUS_EXPR)
1450 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1452 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1455 else if (code == MINUS_EXPR)
1457 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1460 else if (code == MULT_EXPR)
1462 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1466 /* Handle general case of two integer constants. */
1467 return int_const_binop (code, arg0, arg1, 0);
1470 return fold_build2_loc (loc, code, type, arg0, arg1);
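/* For example, with T a sizetype INTEGER_CST, size_binop (PLUS_EXPR,
   size_int (0), T) returns T itself through the fast path above, while
   size_binop (PLUS_EXPR, size_int (4), size_int (8)) goes through
   int_const_binop and yields a sizetype constant 12.  */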
1473 /* Given two values, either both of sizetype or both of bitsizetype,
1474 compute the difference between the two values. Return the value
1475 in signed type corresponding to the type of the operands. */
1478 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1480 tree type = TREE_TYPE (arg0);
1483 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1486 /* If the type is already signed, just do the simple thing. */
1487 if (!TYPE_UNSIGNED (type))
1488 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1490 if (type == sizetype)
1492 else if (type == bitsizetype)
1493 ctype = sbitsizetype;
1495 ctype = signed_type_for (type);
1497 /* If either operand is not a constant, do the conversions to the signed
1498 type and subtract. The hardware will do the right thing with any
1499 overflow in the subtraction. */
1500 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1501 return size_binop_loc (loc, MINUS_EXPR,
1502 fold_convert_loc (loc, ctype, arg0),
1503 fold_convert_loc (loc, ctype, arg1));
1505 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1506 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1507 overflow) and negate (which can't either). Special-case a result
1508 of zero while we're here. */
1509 if (tree_int_cst_equal (arg0, arg1))
1510 return build_int_cst (ctype, 0);
1511 else if (tree_int_cst_lt (arg1, arg0))
1512 return fold_convert_loc (loc, ctype,
1513 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1515 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1516 fold_convert_loc (loc, ctype,
1517 size_binop_loc (loc,
1522 /* A subroutine of fold_convert_const handling conversions of an
1523 INTEGER_CST to another integer type. */
1526 fold_convert_const_int_from_int (tree type, const_tree arg1)
1530 /* Given an integer constant, make new constant with new type,
1531 appropriately sign-extended or truncated. */
1532 t = force_fit_type_double (type, tree_to_double_int (arg1),
1533 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1534 (TREE_INT_CST_HIGH (arg1) < 0
1535 && (TYPE_UNSIGNED (type)
1536 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1537 | TREE_OVERFLOW (arg1));
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */
1546 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1551 /* The following code implements the floating point to integer
1552 conversion rules required by the Java Language Specification,
1553 that IEEE NaNs are mapped to zero and values that overflow
1554 the target precision saturate, i.e. values greater than
1555 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1556 are mapped to INT_MIN. These semantics are allowed by the
1557 C and C++ standards that simply state that the behavior of
1558 FP-to-integer conversion is unspecified upon overflow. */
1562 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1566 case FIX_TRUNC_EXPR:
1567 real_trunc (&r, VOIDmode, &x);
1574 /* If R is NaN, return zero and show we have an overflow. */
1575 if (REAL_VALUE_ISNAN (r))
1578 val = double_int_zero;
/* See if R is less than the lower bound or greater than the
   upper bound.  */
1586 tree lt = TYPE_MIN_VALUE (type);
1587 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1588 if (REAL_VALUES_LESS (r, l))
1591 val = tree_to_double_int (lt);
1597 tree ut = TYPE_MAX_VALUE (type);
1600 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1601 if (REAL_VALUES_LESS (u, r))
1604 val = tree_to_double_int (ut);
1610 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1612 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1616 /* A subroutine of fold_convert_const handling conversions of a
1617 FIXED_CST to an integer type. */
1620 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1623 double_int temp, temp_trunc;
1626 /* Right shift FIXED_CST to temp by fbit. */
1627 temp = TREE_FIXED_CST (arg1).data;
1628 mode = TREE_FIXED_CST (arg1).mode;
1629 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
1631 temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
1632 HOST_BITS_PER_DOUBLE_INT,
1633 SIGNED_FIXED_POINT_MODE_P (mode));
1635 /* Left shift temp to temp_trunc by fbit. */
1636 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
1637 HOST_BITS_PER_DOUBLE_INT,
1638 SIGNED_FIXED_POINT_MODE_P (mode));
1642 temp = double_int_zero;
1643 temp_trunc = double_int_zero;
/* If FIXED_CST is negative, we need to round the value toward 0:
   if the fractional bits are not zero, add 1 to temp.  */
1648 if (SIGNED_FIXED_POINT_MODE_P (mode)
1649 && double_int_negative_p (temp_trunc)
1650 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
1651 temp = double_int_add (temp, double_int_one);
1653 /* Given a fixed-point constant, make new constant with new type,
1654 appropriately sign-extended or truncated. */
1655 t = force_fit_type_double (type, temp, -1,
1656 (double_int_negative_p (temp)
1657 && (TYPE_UNSIGNED (type)
1658 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1659 | TREE_OVERFLOW (arg1));
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */
1668 fold_convert_const_real_from_real (tree type, const_tree arg1)
1670 REAL_VALUE_TYPE value;
1673 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1674 t = build_real (type, value);
1676 /* If converting an infinity or NAN to a representation that doesn't
1677 have one, set the overflow bit so that we can produce some kind of
1678 error message at the appropriate point if necessary. It's not the
1679 most user-friendly message, but it's better than nothing. */
1680 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1681 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1682 TREE_OVERFLOW (t) = 1;
1683 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1684 && !MODE_HAS_NANS (TYPE_MODE (type)))
1685 TREE_OVERFLOW (t) = 1;
1686 /* Regular overflow, conversion produced an infinity in a mode that
1687 can't represent them. */
1688 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1689 && REAL_VALUE_ISINF (value)
1690 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1691 TREE_OVERFLOW (t) = 1;
1693 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */
1701 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1703 REAL_VALUE_TYPE value;
1706 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1707 t = build_real (type, value);
1709 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */
1717 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1719 FIXED_VALUE_TYPE value;
1723 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1724 TYPE_SATURATING (type));
1725 t = build_fixed (type, value);
1727 /* Propagate overflow flags. */
1728 if (overflow_p | TREE_OVERFLOW (arg1))
1729 TREE_OVERFLOW (t) = 1;
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */
1737 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1739 FIXED_VALUE_TYPE value;
1743 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1744 TREE_INT_CST (arg1),
1745 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1746 TYPE_SATURATING (type));
1747 t = build_fixed (type, value);
1749 /* Propagate overflow flags. */
1750 if (overflow_p | TREE_OVERFLOW (arg1))
1751 TREE_OVERFLOW (t) = 1;
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */
1759 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1761 FIXED_VALUE_TYPE value;
1765 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1766 &TREE_REAL_CST (arg1),
1767 TYPE_SATURATING (type));
1768 t = build_fixed (type, value);
1770 /* Propagate overflow flags. */
1771 if (overflow_p | TREE_OVERFLOW (arg1))
1772 TREE_OVERFLOW (t) = 1;
1776 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1777 type TYPE. If no simplification can be done return NULL_TREE. */
1780 fold_convert_const (enum tree_code code, tree type, tree arg1)
1782 if (TREE_TYPE (arg1) == type)
1785 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1786 || TREE_CODE (type) == OFFSET_TYPE)
1788 if (TREE_CODE (arg1) == INTEGER_CST)
1789 return fold_convert_const_int_from_int (type, arg1);
1790 else if (TREE_CODE (arg1) == REAL_CST)
1791 return fold_convert_const_int_from_real (code, type, arg1);
1792 else if (TREE_CODE (arg1) == FIXED_CST)
1793 return fold_convert_const_int_from_fixed (type, arg1);
1795 else if (TREE_CODE (type) == REAL_TYPE)
1797 if (TREE_CODE (arg1) == INTEGER_CST)
1798 return build_real_from_int_cst (type, arg1);
1799 else if (TREE_CODE (arg1) == REAL_CST)
1800 return fold_convert_const_real_from_real (type, arg1);
1801 else if (TREE_CODE (arg1) == FIXED_CST)
1802 return fold_convert_const_real_from_fixed (type, arg1);
1804 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1806 if (TREE_CODE (arg1) == FIXED_CST)
1807 return fold_convert_const_fixed_from_fixed (type, arg1);
1808 else if (TREE_CODE (arg1) == INTEGER_CST)
1809 return fold_convert_const_fixed_from_int (type, arg1);
1810 else if (TREE_CODE (arg1) == REAL_CST)
1811 return fold_convert_const_fixed_from_real (type, arg1);
1816 /* Construct a vector of zero elements of vector type TYPE. */
1819 build_zero_vector (tree type)
1823 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1824 return build_vector_from_val (type, t);
/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
1830 fold_convertible_p (const_tree type, const_tree arg)
1832 tree orig = TREE_TYPE (arg);
1837 if (TREE_CODE (arg) == ERROR_MARK
1838 || TREE_CODE (type) == ERROR_MARK
1839 || TREE_CODE (orig) == ERROR_MARK)
1842 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1845 switch (TREE_CODE (type))
1847 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1848 case POINTER_TYPE: case REFERENCE_TYPE:
1850 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1851 || TREE_CODE (orig) == OFFSET_TYPE)
1853 return (TREE_CODE (orig) == VECTOR_TYPE
1854 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1857 case FIXED_POINT_TYPE:
1861 return TREE_CODE (type) == TREE_CODE (orig);
1868 /* Convert expression ARG to type TYPE. Used by the middle-end for
1869 simple conversions in preference to calling the front-end's convert. */
1872 fold_convert_loc (location_t loc, tree type, tree arg)
1874 tree orig = TREE_TYPE (arg);
1880 if (TREE_CODE (arg) == ERROR_MARK
1881 || TREE_CODE (type) == ERROR_MARK
1882 || TREE_CODE (orig) == ERROR_MARK)
1883 return error_mark_node;
1885 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1886 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1888 switch (TREE_CODE (type))
1891 case REFERENCE_TYPE:
1892 /* Handle conversions between pointers to different address spaces. */
1893 if (POINTER_TYPE_P (orig)
1894 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1895 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1896 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1899 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1901 if (TREE_CODE (arg) == INTEGER_CST)
1903 tem = fold_convert_const (NOP_EXPR, type, arg);
1904 if (tem != NULL_TREE)
1907 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1908 || TREE_CODE (orig) == OFFSET_TYPE)
1909 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1910 if (TREE_CODE (orig) == COMPLEX_TYPE)
1911 return fold_convert_loc (loc, type,
1912 fold_build1_loc (loc, REALPART_EXPR,
1913 TREE_TYPE (orig), arg));
1914 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1915 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1916 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1919 if (TREE_CODE (arg) == INTEGER_CST)
1921 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1922 if (tem != NULL_TREE)
1925 else if (TREE_CODE (arg) == REAL_CST)
1927 tem = fold_convert_const (NOP_EXPR, type, arg);
1928 if (tem != NULL_TREE)
1931 else if (TREE_CODE (arg) == FIXED_CST)
1933 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1934 if (tem != NULL_TREE)
1938 switch (TREE_CODE (orig))
1941 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1942 case POINTER_TYPE: case REFERENCE_TYPE:
1943 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1946 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1948 case FIXED_POINT_TYPE:
1949 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1952 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1953 return fold_convert_loc (loc, type, tem);
1959 case FIXED_POINT_TYPE:
1960 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1961 || TREE_CODE (arg) == REAL_CST)
1963 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1964 if (tem != NULL_TREE)
1965 goto fold_convert_exit;
1968 switch (TREE_CODE (orig))
1970 case FIXED_POINT_TYPE:
1975 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1978 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1979 return fold_convert_loc (loc, type, tem);
1986 switch (TREE_CODE (orig))
1989 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1990 case POINTER_TYPE: case REFERENCE_TYPE:
1992 case FIXED_POINT_TYPE:
1993 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1994 fold_convert_loc (loc, TREE_TYPE (type), arg),
1995 fold_convert_loc (loc, TREE_TYPE (type),
1996 integer_zero_node));
2001 if (TREE_CODE (arg) == COMPLEX_EXPR)
2003 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2004 TREE_OPERAND (arg, 0));
2005 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2006 TREE_OPERAND (arg, 1));
2007 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2010 arg = save_expr (arg);
2011 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2012 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2013 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2014 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2015 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2023 if (integer_zerop (arg))
2024 return build_zero_vector (type);
2025 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2026 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2027 || TREE_CODE (orig) == VECTOR_TYPE);
2028 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2031 tem = fold_ignored_result (arg);
2032 if (TREE_CODE (tem) == MODIFY_EXPR)
2033 goto fold_convert_exit;
2034 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2040 protected_set_expr_location_unshare (tem, loc);
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */
2048 maybe_lvalue_p (const_tree x)
2050 /* We only need to wrap lvalue tree codes. */
2051 switch (TREE_CODE (x))
2064 case ARRAY_RANGE_REF:
2070 case PREINCREMENT_EXPR:
2071 case PREDECREMENT_EXPR:
2073 case TRY_CATCH_EXPR:
2074 case WITH_CLEANUP_EXPR:
2083 /* Assume the worst for front-end tree codes. */
2084 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2092 /* Return an expr equal to X but certainly not valid as an lvalue. */
2095 non_lvalue_loc (location_t loc, tree x)
/* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
   us.  */
2102 if (! maybe_lvalue_p (x))
2104 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2107 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2108 Zero means allow extended lvalues. */
2110 int pedantic_lvalues;
2112 /* When pedantic, return an expr equal to X but certainly not valid as a
2113 pedantic lvalue. Otherwise, return X. */
2116 pedantic_non_lvalue_loc (location_t loc, tree x)
2118 if (pedantic_lvalues)
2119 return non_lvalue_loc (loc, x);
2121 return protected_set_expr_location_unshare (x, loc);
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we take an HONOR_NANS
   flag as well: if reversing the comparison is unsafe, return ERROR_MARK.  */
2130 invert_tree_comparison (enum tree_code code, bool honor_nans)
2132 if (honor_nans && flag_trapping_math)
2142 return honor_nans ? UNLE_EXPR : LE_EXPR;
2144 return honor_nans ? UNLT_EXPR : LT_EXPR;
2146 return honor_nans ? UNGE_EXPR : GE_EXPR;
2148 return honor_nans ? UNGT_EXPR : GT_EXPR;
2162 return UNORDERED_EXPR;
2163 case UNORDERED_EXPR:
2164 return ORDERED_EXPR;
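/* For instance, with flag_trapping_math clear, invert_tree_comparison
   (LT_EXPR, true) is UNGE_EXPR, so that NaN operands still satisfy the
   inverted test, while invert_tree_comparison (LT_EXPR, false) is the
   plain GE_EXPR; with both HONOR_NANS and flag_trapping_math set the
   function gives up and returns ERROR_MARK.  */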
2170 /* Similar, but return the comparison that results if the operands are
2171 swapped. This is safe for floating-point. */
2174 swap_tree_comparison (enum tree_code code)
2181 case UNORDERED_EXPR:
2207 /* Convert a comparison tree code from an enum tree_code representation
2208 into a compcode bit-based encoding. This function is the inverse of
2209 compcode_to_comparison. */
2211 static enum comparison_code
2212 comparison_to_compcode (enum tree_code code)
2229 return COMPCODE_ORD;
2230 case UNORDERED_EXPR:
2231 return COMPCODE_UNORD;
2233 return COMPCODE_UNLT;
2235 return COMPCODE_UNEQ;
2237 return COMPCODE_UNLE;
2239 return COMPCODE_UNGT;
2241 return COMPCODE_LTGT;
2243 return COMPCODE_UNGE;
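/* The point of the encoding is that the compcodes compose as bit masks;
   e.g. the compcode for LE_EXPR is the union of the LT and EQ bits, so
   ANDing or ORing two compcodes (as combine_comparisons does below)
   directly yields the compcode of the combined comparison.  */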
2249 /* Convert a compcode bit-based encoding of a comparison operator back
2250 to GCC's enum tree_code representation. This function is the
2251 inverse of comparison_to_compcode. */
2253 static enum tree_code
2254 compcode_to_comparison (enum comparison_code code)
2271 return ORDERED_EXPR;
2272 case COMPCODE_UNORD:
2273 return UNORDERED_EXPR;
2291 /* Return a tree for the comparison which is the combination of
2292 doing the AND or OR (depending on CODE) of the two operations LCODE
2293 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2294 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2295 if this makes the transformation invalid. */
2298 combine_comparisons (location_t loc,
2299 enum tree_code code, enum tree_code lcode,
2300 enum tree_code rcode, tree truth_type,
2301 tree ll_arg, tree lr_arg)
2303 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2304 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2305 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2310 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2311 compcode = lcompcode & rcompcode;
2314 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2315 compcode = lcompcode | rcompcode;
2324 /* Eliminate unordered comparisons, as well as LTGT and ORD
2325 which are not used unless the mode has NaNs. */
2326 compcode &= ~COMPCODE_UNORD;
2327 if (compcode == COMPCODE_LTGT)
2328 compcode = COMPCODE_NE;
2329 else if (compcode == COMPCODE_ORD)
2330 compcode = COMPCODE_TRUE;
2332 else if (flag_trapping_math)
2334 /* Check that the original operation and the optimized ones will trap
2335 under the same condition. */
2336 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2337 && (lcompcode != COMPCODE_EQ)
2338 && (lcompcode != COMPCODE_ORD);
2339 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2340 && (rcompcode != COMPCODE_EQ)
2341 && (rcompcode != COMPCODE_ORD);
2342 bool trap = (compcode & COMPCODE_UNORD) == 0
2343 && (compcode != COMPCODE_EQ)
2344 && (compcode != COMPCODE_ORD);
2346 /* In a short-circuited boolean expression the LHS might be
2347 such that the RHS, if evaluated, will never trap. For
2348 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2349 if neither x nor y is NaN. (This is a mixed blessing: for
2350 example, the expression above will never trap, hence
2351 optimizing it to x < y would be invalid). */
2352 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2353 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2356 /* If the comparison was short-circuited, and only the RHS
2357 trapped, we may now generate a spurious trap. */
2359 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2362 /* If we changed the conditions that cause a trap, we lose. */
2363 if ((ltrap || rtrap) != trap)
2367 if (compcode == COMPCODE_TRUE)
2368 return constant_boolean_node (true, truth_type);
2369 else if (compcode == COMPCODE_FALSE)
2370 return constant_boolean_node (false, truth_type);
2373 enum tree_code tcode;
2375 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2376 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
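/* Two typical results (editor's illustration; integer operands, so NaNs
   and traps play no role):

     (x < y) || (x == y)  ->  x <= y  (COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE)
     (x < y) && (x > y)   ->  0       (COMPCODE_LT & COMPCODE_GT == COMPCODE_FALSE)

   For floating point with flag_trapping_math the trap analysis above can
   force a NULL_TREE return instead: folding (x <= y) && (x >= y) to
   x == y would lose the invalid-operand exception that either ordered
   comparison raises on a NaN, so that combination is rejected.  */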
2380 /* Return nonzero if two operands (typically of the same tree node)
2381 are necessarily equal. If either argument has side-effects this
2382 function returns zero. FLAGS modifies behavior as follows:
2384 If OEP_ONLY_CONST is set, only return nonzero for constants.
2385 This function tests whether the operands are indistinguishable;
2386 it does not test whether they are equal using C's == operation.
2387 The distinction is important for IEEE floating point, because
2388 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2389 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2391 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2392 even though it may hold multiple values during a function.
2393 This is because a GCC tree node guarantees that nothing else is
2394 executed between the evaluation of its "operands" (which may often
2395 be evaluated in arbitrary order). Hence if the operands themselves
2396 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2397 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2398 unset means assuming isochronic (or instantaneous) tree equivalence.
2399 Unless comparing arbitrary expression trees, such as from different
2400 statements, this flag can usually be left unset.
2402 If OEP_PURE_SAME is set, then pure functions with identical arguments
2403 are considered the same. It is used when the caller has other ways
2404 to ensure that global memory is unchanged in between. */
2407 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2409 /* If either is ERROR_MARK, they aren't equal. */
2410 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2411 || TREE_TYPE (arg0) == error_mark_node
2412 || TREE_TYPE (arg1) == error_mark_node)
2415 /* Similar, if either does not have a type (like a released SSA name),
2416 they aren't equal. */
2417 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2420 /* Check equality of integer constants before bailing out due to
2421 precision differences. */
2422 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2423 return tree_int_cst_equal (arg0, arg1);
2425 /* If both types don't have the same signedness, then we can't consider
2426 them equal. We must check this before the STRIP_NOPS calls
2427 because they may change the signedness of the arguments. As pointers
2428 strictly don't have a signedness, require either two pointers or
2429 two non-pointers as well. */
2430 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2431 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2434 /* We cannot consider pointers to different address spaces equal. */
2435 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2436 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2437 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2440 /* If both types don't have the same precision, then it is not safe
2442 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2448 /* In case both args are comparisons but with different comparison
2449 code, try to swap the comparison operands of one arg to produce
2450 a match and compare that variant. */
2451 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2452 && COMPARISON_CLASS_P (arg0)
2453 && COMPARISON_CLASS_P (arg1))
2455 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2457 if (TREE_CODE (arg0) == swap_code)
2458 return operand_equal_p (TREE_OPERAND (arg0, 0),
2459 TREE_OPERAND (arg1, 1), flags)
2460 && operand_equal_p (TREE_OPERAND (arg0, 1),
2461 TREE_OPERAND (arg1, 0), flags);
2464 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2465 /* This is needed for conversions and for COMPONENT_REF.
2466 Might as well play it safe and always test this. */
2467 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2468 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2469 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2472 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2473 We don't care about side effects in that case because the SAVE_EXPR
2474 takes care of that for us. In all other cases, two expressions are
2475 equal if they have no side effects. If we have two identical
2476 expressions with side effects that should be treated the same due
2477 to the only side effects being identical SAVE_EXPR's, that will
2478 be detected in the recursive calls below. */
2479 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2480 && (TREE_CODE (arg0) == SAVE_EXPR
2481 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2484 /* Next handle constant cases, those for which we can return 1 even
2485 if ONLY_CONST is set. */
2486 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2487 switch (TREE_CODE (arg0))
2490 return tree_int_cst_equal (arg0, arg1);
2493 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2494 TREE_FIXED_CST (arg1));
2497 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2498 TREE_REAL_CST (arg1)))
2502 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2504 /* If we do not distinguish between signed and unsigned zero,
2505 consider them equal. */
2506 if (real_zerop (arg0) && real_zerop (arg1))
2515 v1 = TREE_VECTOR_CST_ELTS (arg0);
2516 v2 = TREE_VECTOR_CST_ELTS (arg1);
2519 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2522 v1 = TREE_CHAIN (v1);
2523 v2 = TREE_CHAIN (v2);
2530 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2532 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2536 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2537 && ! memcmp (TREE_STRING_POINTER (arg0),
2538 TREE_STRING_POINTER (arg1),
2539 TREE_STRING_LENGTH (arg0)));
2542 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2548 if (flags & OEP_ONLY_CONST)
2551 /* Define macros to test an operand from arg0 and arg1 for equality and a
2552 variant that allows null and views null as being different from any
2553 non-null value. In the latter case, if either is null, then both
2554 must be; otherwise, do the normal comparison. */
2555 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2556 TREE_OPERAND (arg1, N), flags)
2558 #define OP_SAME_WITH_NULL(N) \
2559 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2560 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2562 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2565 /* Two conversions are equal only if signedness and modes match. */
2566 switch (TREE_CODE (arg0))
2569 case FIX_TRUNC_EXPR:
2570 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2571 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2581 case tcc_comparison:
2583 if (OP_SAME (0) && OP_SAME (1))
2586 /* For commutative ops, allow the other order. */
2587 return (commutative_tree_code (TREE_CODE (arg0))
2588 && operand_equal_p (TREE_OPERAND (arg0, 0),
2589 TREE_OPERAND (arg1, 1), flags)
2590 && operand_equal_p (TREE_OPERAND (arg0, 1),
2591 TREE_OPERAND (arg1, 0), flags));
2594 /* If either of the pointer (or reference) expressions we are
2595 dereferencing contain a side effect, these cannot be equal. */
2596 if (TREE_SIDE_EFFECTS (arg0)
2597 || TREE_SIDE_EFFECTS (arg1))
2600 switch (TREE_CODE (arg0))
2608 /* Require equal access sizes, and similar pointer types.
2609 We can have incomplete types for array references of
2610 variable-sized arrays from the Fortran frontend
2612 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2613 || (TYPE_SIZE (TREE_TYPE (arg0))
2614 && TYPE_SIZE (TREE_TYPE (arg1))
2615 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2616 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2617 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2618 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2619 && OP_SAME (0) && OP_SAME (1));
2622 case ARRAY_RANGE_REF:
2623 /* Operands 2 and 3 may be null.
2624 Compare the array index by value if it is constant first as we
2625 may have different types but same value here. */
2627 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2628 TREE_OPERAND (arg1, 1))
2630 && OP_SAME_WITH_NULL (2)
2631 && OP_SAME_WITH_NULL (3));
2634 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2635 may be NULL when we're called to compare MEM_EXPRs. */
2636 return OP_SAME_WITH_NULL (0)
2638 && OP_SAME_WITH_NULL (2);
2641 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2647 case tcc_expression:
2648 switch (TREE_CODE (arg0))
2651 case TRUTH_NOT_EXPR:
2654 case TRUTH_ANDIF_EXPR:
2655 case TRUTH_ORIF_EXPR:
2656 return OP_SAME (0) && OP_SAME (1);
2659 case WIDEN_MULT_PLUS_EXPR:
2660 case WIDEN_MULT_MINUS_EXPR:
2663 /* The multiplication operands are commutative. */
2666 case TRUTH_AND_EXPR:
2668 case TRUTH_XOR_EXPR:
2669 if (OP_SAME (0) && OP_SAME (1))
2672 /* Otherwise take into account this is a commutative operation. */
2673 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2674 TREE_OPERAND (arg1, 1), flags)
2675 && operand_equal_p (TREE_OPERAND (arg0, 1),
2676 TREE_OPERAND (arg1, 0), flags));
2681 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2688 switch (TREE_CODE (arg0))
2691 /* If the CALL_EXPRs call different functions, then they
2692 clearly can not be equal. */
2693 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2698 unsigned int cef = call_expr_flags (arg0);
2699 if (flags & OEP_PURE_SAME)
2700 cef &= ECF_CONST | ECF_PURE;
2707 /* Now see if all the arguments are the same. */
2709 const_call_expr_arg_iterator iter0, iter1;
2711 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2712 a1 = first_const_call_expr_arg (arg1, &iter1);
2714 a0 = next_const_call_expr_arg (&iter0),
2715 a1 = next_const_call_expr_arg (&iter1))
2716 if (! operand_equal_p (a0, a1, flags))
2719 /* If we get here and both argument lists are exhausted
2720 then the CALL_EXPRs are equal. */
2721 return ! (a0 || a1);
2727 case tcc_declaration:
2728 /* Consider __builtin_sqrt equal to sqrt. */
2729 return (TREE_CODE (arg0) == FUNCTION_DECL
2730 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2731 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2732 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2739 #undef OP_SAME_WITH_NULL
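/* A few illustrative calls (editor's note; a and b are side-effect-free
   operands, f is an ordinary, non-const call):

     operand_equal_p (a + b, a + b, 0)            -> 1
     operand_equal_p (b + a, a + b, 0)            -> 1  (PLUS_EXPR is commutative)
     operand_equal_p (f () + 1, f () + 1, 0)      -> 0  (calls are not ECF_CONST)
     operand_equal_p (0.0, -0.0, OEP_ONLY_CONST)  -> 0  when signed zeros are honored

   With OEP_PURE_SAME, two calls to the same pure function with equal
   arguments would also compare equal.  */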
2742 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2743 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2745 When in doubt, return 0. */
2748 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2750 int unsignedp1, unsignedpo;
2751 tree primarg0, primarg1, primother;
2752 unsigned int correct_width;
2754 if (operand_equal_p (arg0, arg1, 0))
2757 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2758 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2761 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2762 and see if the inner values are the same. This removes any
2763 signedness comparison, which doesn't matter here. */
2764 primarg0 = arg0, primarg1 = arg1;
2765 STRIP_NOPS (primarg0);
2766 STRIP_NOPS (primarg1);
2767 if (operand_equal_p (primarg0, primarg1, 0))
2770 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2771 actual comparison operand, ARG0.
2773 First throw away any conversions to wider types
2774 already present in the operands. */
2776 primarg1 = get_narrower (arg1, &unsignedp1);
2777 primother = get_narrower (other, &unsignedpo);
2779 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2780 if (unsignedp1 == unsignedpo
2781 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2782 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2784 tree type = TREE_TYPE (arg0);
2786 /* Make sure shorter operand is extended the right way
2787 to match the longer operand. */
2788 primarg1 = fold_convert (signed_or_unsigned_type_for
2789 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2791 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2798 /* See if ARG is an expression that is either a comparison or is performing
2799 arithmetic on comparisons. The comparisons must only be comparing
2800 two different values, which will be stored in *CVAL1 and *CVAL2; if
2801 they are nonzero it means that some operands have already been found.
2802 No variables may be used anywhere else in the expression except in the
2803 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2804 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2806 If this is true, return 1. Otherwise, return zero. */
2809 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2811 enum tree_code code = TREE_CODE (arg);
2812 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2814 /* We can handle some of the tcc_expression cases here. */
2815 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2817 else if (tclass == tcc_expression
2818 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2819 || code == COMPOUND_EXPR))
2820 tclass = tcc_binary;
2822 else if (tclass == tcc_expression && code == SAVE_EXPR
2823 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2825 /* If we've already found a CVAL1 or CVAL2, this expression is
2826 too complex to handle.
2827 if (*cval1 || *cval2)
2837 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2840 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2841 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2842 cval1, cval2, save_p));
2847 case tcc_expression:
2848 if (code == COND_EXPR)
2849 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2850 cval1, cval2, save_p)
2851 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2852 cval1, cval2, save_p)
2853 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2854 cval1, cval2, save_p));
2857 case tcc_comparison:
2858 /* First see if we can handle the first operand, then the second. For
2859 the second operand, we know *CVAL1 can't be zero. It must be that
2860 one side of the comparison is each of the values; test for the
2861 case where this isn't true by failing if the two operands
2864 if (operand_equal_p (TREE_OPERAND (arg, 0),
2865 TREE_OPERAND (arg, 1), 0))
2869 *cval1 = TREE_OPERAND (arg, 0);
2870 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2872 else if (*cval2 == 0)
2873 *cval2 = TREE_OPERAND (arg, 0);
2874 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2879 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2881 else if (*cval2 == 0)
2882 *cval2 = TREE_OPERAND (arg, 1);
2883 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2895 /* ARG is a tree that is known to contain just arithmetic operations and
2896 comparisons. Evaluate the operations in the tree substituting NEW0 for
2897 any occurrence of OLD0 as an operand of a comparison and likewise for
2901 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2902 tree old1, tree new1)
2904 tree type = TREE_TYPE (arg);
2905 enum tree_code code = TREE_CODE (arg);
2906 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2908 /* We can handle some of the tcc_expression cases here. */
2909 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2911 else if (tclass == tcc_expression
2912 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2913 tclass = tcc_binary;
2918 return fold_build1_loc (loc, code, type,
2919 eval_subst (loc, TREE_OPERAND (arg, 0),
2920 old0, new0, old1, new1));
2923 return fold_build2_loc (loc, code, type,
2924 eval_subst (loc, TREE_OPERAND (arg, 0),
2925 old0, new0, old1, new1),
2926 eval_subst (loc, TREE_OPERAND (arg, 1),
2927 old0, new0, old1, new1));
2929 case tcc_expression:
2933 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2937 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2941 return fold_build3_loc (loc, code, type,
2942 eval_subst (loc, TREE_OPERAND (arg, 0),
2943 old0, new0, old1, new1),
2944 eval_subst (loc, TREE_OPERAND (arg, 1),
2945 old0, new0, old1, new1),
2946 eval_subst (loc, TREE_OPERAND (arg, 2),
2947 old0, new0, old1, new1));
2951 /* Fall through - ??? */
2953 case tcc_comparison:
2955 tree arg0 = TREE_OPERAND (arg, 0);
2956 tree arg1 = TREE_OPERAND (arg, 1);
2958 /* We need to check both for exact equality and tree equality. The
2959 former will be true if the operand has a side-effect. In that
2960 case, we know the operand occurred exactly once. */
2962 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2964 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2967 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2969 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2972 return fold_build2_loc (loc, code, type, arg0, arg1);
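/* For example (editor's illustration): substituting OLD0 = a, NEW0 = 0,
   OLD1 = b, NEW1 = 1 into the expression (a < b) && (a == b) rebuilds it
   as (0 < 1) && (0 == 1), which the fold_build* calls above then reduce
   to a constant.  fold uses this, together with twoval_comparison_p, to
   find out what such an expression evaluates to under each assumed
   ordering of the two compared values.  */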
2980 /* Return a tree for the case when the result of an expression is RESULT
2981 converted to TYPE and OMITTED was previously an operand of the expression
2982 but is now not needed (e.g., we folded OMITTED * 0).
2984 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2985 the conversion of RESULT to TYPE. */
2988 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2990 tree t = fold_convert_loc (loc, type, result);
2992 /* If the resulting operand is an empty statement, just return the omitted
2993 statement cast to void. */
2994 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2995 return build1_loc (loc, NOP_EXPR, void_type_node,
2996 fold_ignored_result (omitted));
2998 if (TREE_SIDE_EFFECTS (omitted))
2999 return build2_loc (loc, COMPOUND_EXPR, type,
3000 fold_ignored_result (omitted), t);
3002 return non_lvalue_loc (loc, t);
3005 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3008 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3011 tree t = fold_convert_loc (loc, type, result);
3013 /* If the resulting operand is an empty statement, just return the omitted
3014 statement cast to void. */
3015 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3016 return build1_loc (loc, NOP_EXPR, void_type_node,
3017 fold_ignored_result (omitted));
3019 if (TREE_SIDE_EFFECTS (omitted))
3020 return build2_loc (loc, COMPOUND_EXPR, type,
3021 fold_ignored_result (omitted), t);
3023 return pedantic_non_lvalue_loc (loc, t);
3026 /* Return a tree for the case when the result of an expression is RESULT
3027 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3028 of the expression but are now not needed.
3030 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3031 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3032 evaluated before OMITTED2. Otherwise, if neither has side effects,
3033 just do the conversion of RESULT to TYPE. */
3036 omit_two_operands_loc (location_t loc, tree type, tree result,
3037 tree omitted1, tree omitted2)
3039 tree t = fold_convert_loc (loc, type, result);
3041 if (TREE_SIDE_EFFECTS (omitted2))
3042 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3043 if (TREE_SIDE_EFFECTS (omitted1))
3044 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3046 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
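/* Example (editor's note): when fold reduces "f () * 0" to 0 it cannot
   simply drop the call if f has side effects, so omit_one_operand_loc
   yields the COMPOUND_EXPR "(f (), 0)": the omitted operand is still
   evaluated but the value is the folded result.  With two side-effecting
   omitted operands, omit_two_operands_loc builds
   "(omitted1, (omitted2, result))", so OMITTED1 is evaluated first, as
   documented above.  */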
3050 /* Return a simplified tree node for the truth-negation of ARG. This
3051 never alters ARG itself. We assume that ARG is an operation that
3052 returns a truth value (0 or 1).
3054 FIXME: one would think we would fold the result, but it causes
3055 problems with the dominator optimizer. */
3058 fold_truth_not_expr (location_t loc, tree arg)
3060 tree type = TREE_TYPE (arg);
3061 enum tree_code code = TREE_CODE (arg);
3062 location_t loc1, loc2;
3064 /* If this is a comparison, we can simply invert it, except for
3065 floating-point non-equality comparisons, in which case we just
3066 enclose a TRUTH_NOT_EXPR around what we have. */
3068 if (TREE_CODE_CLASS (code) == tcc_comparison)
3070 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3071 if (FLOAT_TYPE_P (op_type)
3072 && flag_trapping_math
3073 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3074 && code != NE_EXPR && code != EQ_EXPR)
3077 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3078 if (code == ERROR_MARK)
3081 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3082 TREE_OPERAND (arg, 1));
3088 return constant_boolean_node (integer_zerop (arg), type);
3090 case TRUTH_AND_EXPR:
3091 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3092 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3093 return build2_loc (loc, TRUTH_OR_EXPR, type,
3094 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3095 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3098 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3099 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3100 return build2_loc (loc, TRUTH_AND_EXPR, type,
3101 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3102 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3104 case TRUTH_XOR_EXPR:
3105 /* Here we can invert either operand. We invert the first operand
3106 unless the second operand is a TRUTH_NOT_EXPR in which case our
3107 result is the XOR of the first operand with the inside of the
3108 negation of the second operand. */
3110 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3111 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3112 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3114 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3115 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3116 TREE_OPERAND (arg, 1));
3118 case TRUTH_ANDIF_EXPR:
3119 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3120 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3121 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3122 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3123 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3125 case TRUTH_ORIF_EXPR:
3126 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3127 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3128 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3129 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3130 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3132 case TRUTH_NOT_EXPR:
3133 return TREE_OPERAND (arg, 0);
3137 tree arg1 = TREE_OPERAND (arg, 1);
3138 tree arg2 = TREE_OPERAND (arg, 2);
3140 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3141 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3143 /* A COND_EXPR may have a throw as one operand, which
3144 then has void type. Just leave void operands
3146 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3147 VOID_TYPE_P (TREE_TYPE (arg1))
3148 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3149 VOID_TYPE_P (TREE_TYPE (arg2))
3150 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3154 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3155 return build2_loc (loc, COMPOUND_EXPR, type,
3156 TREE_OPERAND (arg, 0),
3157 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3159 case NON_LVALUE_EXPR:
3160 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3161 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3164 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3165 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3167 /* ... fall through ... */
3170 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3171 return build1_loc (loc, TREE_CODE (arg), type,
3172 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3175 if (!integer_onep (TREE_OPERAND (arg, 1)))
3177 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3180 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3182 case CLEANUP_POINT_EXPR:
3183 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3184 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3185 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
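/* Some concrete negations produced above (editor's illustration):

     !(a < b)      ->  a >= b         (integral operands)
     !(a && b)     ->  !a || !b       (De Morgan, TRUTH_AND/ANDIF cases)
     !(p ? q : r)  ->  p ? !q : !r    (negation pushed into both arms)
     !(x & 1)      ->  (x & 1) == 0   (BIT_AND_EXPR case)

   For floating-point operands with trapping math the comparison case only
   inverts ==, !=, ORDERED and UNORDERED; everything else falls back to
   wrapping a TRUTH_NOT_EXPR in invert_truthvalue_loc below.  */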
3192 /* Return a simplified tree node for the truth-negation of ARG. This
3193 never alters ARG itself. We assume that ARG is an operation that
3194 returns a truth value (0 or 1).
3196 FIXME: one would think we would fold the result, but it causes
3197 problems with the dominator optimizer. */
3200 invert_truthvalue_loc (location_t loc, tree arg)
3204 if (TREE_CODE (arg) == ERROR_MARK)
3207 tem = fold_truth_not_expr (loc, arg);
3209 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3214 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3215 operands are another bit-wise operation with a common input. If so,
3216 distribute the bit operations to save an operation and possibly two if
3217 constants are involved. For example, convert
3218 (A | B) & (A | C) into A | (B & C)
3219 Further simplification will occur if B and C are constants.
3221 If this optimization cannot be done, 0 will be returned. */
3224 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3225 tree arg0, tree arg1)
3230 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3231 || TREE_CODE (arg0) == code
3232 || (TREE_CODE (arg0) != BIT_AND_EXPR
3233 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3236 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3238 common = TREE_OPERAND (arg0, 0);
3239 left = TREE_OPERAND (arg0, 1);
3240 right = TREE_OPERAND (arg1, 1);
3242 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3244 common = TREE_OPERAND (arg0, 0);
3245 left = TREE_OPERAND (arg0, 1);
3246 right = TREE_OPERAND (arg1, 0);
3248 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3250 common = TREE_OPERAND (arg0, 1);
3251 left = TREE_OPERAND (arg0, 0);
3252 right = TREE_OPERAND (arg1, 1);
3254 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3256 common = TREE_OPERAND (arg0, 1);
3257 left = TREE_OPERAND (arg0, 0);
3258 right = TREE_OPERAND (arg1, 0);
3263 common = fold_convert_loc (loc, type, common);
3264 left = fold_convert_loc (loc, type, left);
3265 right = fold_convert_loc (loc, type, right);
3266 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3267 fold_build2_loc (loc, code, type, left, right));
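/* Worked example (editor's note): (x | 3) & (x | 5) becomes x | (3 & 5),
   i.e. x | 1, and the dual form (x & 3) | (x & 5) becomes x & (3 | 5),
   i.e. x & 7; in both cases the inner operation on the two constants
   folds away completely.  */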
3270 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3271 with code CODE. This optimization is unsafe. */
3273 distribute_real_division (location_t loc, enum tree_code code, tree type,
3274 tree arg0, tree arg1)
3276 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3277 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3279 /* (A / C) +- (B / C) -> (A +- B) / C. */
3281 && operand_equal_p (TREE_OPERAND (arg0, 1),
3282 TREE_OPERAND (arg1, 1), 0))
3283 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3284 fold_build2_loc (loc, code, type,
3285 TREE_OPERAND (arg0, 0),
3286 TREE_OPERAND (arg1, 0)),
3287 TREE_OPERAND (arg0, 1));
3289 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3290 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3291 TREE_OPERAND (arg1, 0), 0)
3292 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3293 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3295 REAL_VALUE_TYPE r0, r1;
3296 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3297 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3299 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3301 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3302 real_arithmetic (&r0, code, &r0, &r1);
3303 return fold_build2_loc (loc, MULT_EXPR, type,
3304 TREE_OPERAND (arg0, 0),
3305 build_real (type, r0));
3311 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3312 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3315 make_bit_field_ref (location_t loc, tree inner, tree type,
3316 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3318 tree result, bftype;
3322 tree size = TYPE_SIZE (TREE_TYPE (inner));
3323 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3324 || POINTER_TYPE_P (TREE_TYPE (inner)))
3325 && host_integerp (size, 0)
3326 && tree_low_cst (size, 0) == bitsize)
3327 return fold_convert_loc (loc, type, inner);
3331 if (TYPE_PRECISION (bftype) != bitsize
3332 || TYPE_UNSIGNED (bftype) == !unsignedp)
3333 bftype = build_nonstandard_integer_type (bitsize, 0);
3335 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3336 size_int (bitsize), bitsize_int (bitpos));
3339 result = fold_convert_loc (loc, type, result);
3344 /* Optimize a bit-field compare.
3346 There are two cases: First is a compare against a constant and the
3347 second is a comparison of two items where the fields are at the same
3348 bit position relative to the start of a chunk (byte, halfword, word)
3349 large enough to contain it. In these cases we can avoid the shift
3350 implicit in bitfield extractions.
3352 For constants, we emit a compare of the shifted constant with the
3353 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3354 compared. For two fields at the same position, we do the ANDs with the
3355 similar mask and compare the result of the ANDs.
3357 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3358 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3359 are the left and right operands of the comparison, respectively.
3361 If the optimization described above can be done, we return the resulting
3362 tree. Otherwise we return zero. */
3365 optimize_bit_field_compare (location_t loc, enum tree_code code,
3366 tree compare_type, tree lhs, tree rhs)
3368 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3369 tree type = TREE_TYPE (lhs);
3370 tree signed_type, unsigned_type;
3371 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3372 enum machine_mode lmode, rmode, nmode;
3373 int lunsignedp, runsignedp;
3374 int lvolatilep = 0, rvolatilep = 0;
3375 tree linner, rinner = NULL_TREE;
3379 /* Get all the information about the extractions being done. If the bit size
3380 is the same as the size of the underlying object, we aren't doing an
3381 extraction at all and so can do nothing. We also don't want to
3382 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3383 then will no longer be able to replace it. */
3384 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3385 &lunsignedp, &lvolatilep, false);
3386 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3387 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3392 /* If this is not a constant, we can only do something if bit positions,
3393 sizes, and signedness are the same. */
3394 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3395 &runsignedp, &rvolatilep, false);
3397 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3398 || lunsignedp != runsignedp || offset != 0
3399 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3403 /* See if we can find a mode to refer to this field. We should be able to,
3404 but fail if we can't. */
3406 && GET_MODE_BITSIZE (lmode) > 0
3407 && flag_strict_volatile_bitfields > 0)
3410 nmode = get_best_mode (lbitsize, lbitpos,
3411 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3412 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3413 TYPE_ALIGN (TREE_TYPE (rinner))),
3414 word_mode, lvolatilep || rvolatilep);
3415 if (nmode == VOIDmode)
3418 /* Set signed and unsigned types of the precision of this mode for the
3420 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3421 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3423 /* Compute the bit position and size for the new reference and our offset
3424 within it. If the new reference is the same size as the original, we
3425 won't optimize anything, so return zero. */
3426 nbitsize = GET_MODE_BITSIZE (nmode);
3427 nbitpos = lbitpos & ~ (nbitsize - 1);
3429 if (nbitsize == lbitsize)
3432 if (BYTES_BIG_ENDIAN)
3433 lbitpos = nbitsize - lbitsize - lbitpos;
3435 /* Make the mask to be used against the extracted field. */
3436 mask = build_int_cst_type (unsigned_type, -1);
3437 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3438 mask = const_binop (RSHIFT_EXPR, mask,
3439 size_int (nbitsize - lbitsize - lbitpos));
3442 /* If not comparing with constant, just rework the comparison
3444 return fold_build2_loc (loc, code, compare_type,
3445 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3446 make_bit_field_ref (loc, linner,
3451 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3452 make_bit_field_ref (loc, rinner,
3458 /* Otherwise, we are handling the constant case. See if the constant is too
3459 big for the field. Warn and return a tree for 0 (false) if so. We do
3460 this not only for its own sake, but to avoid having to test for this
3461 error case below. If we didn't, we might generate wrong code.
3463 For unsigned fields, the constant shifted right by the field length should
3464 be all zero. For signed fields, the high-order bits should agree with
3469 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3470 fold_convert_loc (loc,
3471 unsigned_type, rhs),
3472 size_int (lbitsize))))
3474 warning (0, "comparison is always %d due to width of bit-field",
3476 return constant_boolean_node (code == NE_EXPR, compare_type);
3481 tree tem = const_binop (RSHIFT_EXPR,
3482 fold_convert_loc (loc, signed_type, rhs),
3483 size_int (lbitsize - 1));
3484 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3486 warning (0, "comparison is always %d due to width of bit-field",
3488 return constant_boolean_node (code == NE_EXPR, compare_type);
3492 /* Single-bit compares should always be against zero. */
3493 if (lbitsize == 1 && ! integer_zerop (rhs))
3495 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3496 rhs = build_int_cst (type, 0);
3499 /* Make a new bitfield reference, shift the constant over the
3500 appropriate number of bits and mask it with the computed mask
3501 (in case this was a signed field). If we changed it, make a new one. */
3502 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3505 TREE_SIDE_EFFECTS (lhs) = 1;
3506 TREE_THIS_VOLATILE (lhs) = 1;
3509 rhs = const_binop (BIT_AND_EXPR,
3510 const_binop (LSHIFT_EXPR,
3511 fold_convert_loc (loc, unsigned_type, rhs),
3512 size_int (lbitpos)),
3515 lhs = build2_loc (loc, code, compare_type,
3516 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
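/* Sketch of the effect (editor's note; the container size, mask and
   shift shown are illustrative and target dependent): given

     struct S { unsigned a : 3; unsigned b : 5; } s;

   the test "s.b == 7" becomes, roughly,

     (BIT_FIELD_REF <s, 8, 0> & MASK) == (7 << SHIFT)

   i.e. one load of the containing unit, one AND with the mask computed
   above and a compare against the pre-shifted constant, instead of the
   extract-and-shift a plain bit-field read would need.  When both sides
   are bit-fields at the same position, both containing units are masked
   and the masked values are compared directly.  */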
3520 /* Subroutine for fold_truthop: decode a field reference.
3522 If EXP is a comparison reference, we return the innermost reference.
3524 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3525 set to the starting bit number.
3527 If the innermost field can be completely contained in a mode-sized
3528 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3530 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3531 otherwise it is not changed.
3533 *PUNSIGNEDP is set to the signedness of the field.
3535 *PMASK is set to the mask used. This is either contained in a
3536 BIT_AND_EXPR or derived from the width of the field.
3538 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3540 Return 0 if this is not a component reference or is one that we can't
3541 do anything with. */
3544 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3545 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3546 int *punsignedp, int *pvolatilep,
3547 tree *pmask, tree *pand_mask)
3549 tree outer_type = 0;
3551 tree mask, inner, offset;
3553 unsigned int precision;
3555 /* All the optimizations using this function assume integer fields.
3556 There are problems with FP fields since the type_for_size call
3557 below can fail for, e.g., XFmode. */
3558 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3561 /* We are interested in the bare arrangement of bits, so strip everything
3562 that doesn't affect the machine mode. However, record the type of the
3563 outermost expression if it may matter below. */
3564 if (CONVERT_EXPR_P (exp)
3565 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3566 outer_type = TREE_TYPE (exp);
3569 if (TREE_CODE (exp) == BIT_AND_EXPR)
3571 and_mask = TREE_OPERAND (exp, 1);
3572 exp = TREE_OPERAND (exp, 0);
3573 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3574 if (TREE_CODE (and_mask) != INTEGER_CST)
3578 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3579 punsignedp, pvolatilep, false);
3580 if ((inner == exp && and_mask == 0)
3581 || *pbitsize < 0 || offset != 0
3582 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3585 /* If the number of bits in the reference is the same as the bitsize of
3586 the outer type, then the outer type gives the signedness. Otherwise
3587 (in case of a small bitfield) the signedness is unchanged. */
3588 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3589 *punsignedp = TYPE_UNSIGNED (outer_type);
3591 /* Compute the mask to access the bitfield. */
3592 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3593 precision = TYPE_PRECISION (unsigned_type);
3595 mask = build_int_cst_type (unsigned_type, -1);
3597 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3598 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3600 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3602 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3603 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3606 *pand_mask = and_mask;
3610 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3614 all_ones_mask_p (const_tree mask, int size)
3616 tree type = TREE_TYPE (mask);
3617 unsigned int precision = TYPE_PRECISION (type);
3620 tmask = build_int_cst_type (signed_type_for (type), -1);
3623 tree_int_cst_equal (mask,
3624 const_binop (RSHIFT_EXPR,
3625 const_binop (LSHIFT_EXPR, tmask,
3626 size_int (precision - size)),
3627 size_int (precision - size)));
3630 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3631 represents the sign bit of EXP's type. If EXP represents a sign
3632 or zero extension, also test VAL against the unextended type.
3633 The return value is the (sub)expression whose sign bit is VAL,
3634 or NULL_TREE otherwise. */
3637 sign_bit_p (tree exp, const_tree val)
3639 unsigned HOST_WIDE_INT mask_lo, lo;
3640 HOST_WIDE_INT mask_hi, hi;
3644 /* Tree EXP must have an integral type. */
3645 t = TREE_TYPE (exp);
3646 if (! INTEGRAL_TYPE_P (t))
3649 /* Tree VAL must be an integer constant. */
3650 if (TREE_CODE (val) != INTEGER_CST
3651 || TREE_OVERFLOW (val))
3654 width = TYPE_PRECISION (t);
3655 if (width > HOST_BITS_PER_WIDE_INT)
3657 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3660 mask_hi = ((unsigned HOST_WIDE_INT) -1
3661 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3667 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3670 mask_lo = ((unsigned HOST_WIDE_INT) -1
3671 >> (HOST_BITS_PER_WIDE_INT - width));
3674 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3675 treat VAL as if it were unsigned. */
3676 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3677 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3680 /* Handle extension from a narrower type. */
3681 if (TREE_CODE (exp) == NOP_EXPR
3682 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3683 return sign_bit_p (TREE_OPERAND (exp, 0), val);
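/* For example (editor's note): with a 32-bit int X, sign_bit_p (X, C)
   returns X when C is the constant 0x80000000; fold uses this to rewrite
   tests such as (X & 0x80000000) != 0 into the cheaper X < 0.  */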
3688 /* Subroutine for fold_truthop: determine if an operand is simple enough
3689 to be evaluated unconditionally. */
3692 simple_operand_p (const_tree exp)
3694 /* Strip any conversions that don't change the machine mode. */
3697 return (CONSTANT_CLASS_P (exp)
3698 || TREE_CODE (exp) == SSA_NAME
3700 && ! TREE_ADDRESSABLE (exp)
3701 && ! TREE_THIS_VOLATILE (exp)
3702 && ! DECL_NONLOCAL (exp)
3703 /* Don't regard global variables as simple. They may be
3704 allocated in ways unknown to the compiler (shared memory,
3705 #pragma weak, etc). */
3706 && ! TREE_PUBLIC (exp)
3707 && ! DECL_EXTERNAL (exp)
3708 /* Loading a static variable is unduly expensive, but global
3709 registers aren't expensive. */
3710 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3713 /* The following functions are subroutines to fold_range_test and allow it to
3714 try to change a logical combination of comparisons into a range test.
3717 X == 2 || X == 3 || X == 4 || X == 5
3721 (unsigned) (X - 2) <= 3
3723 We describe each set of comparisons as being either inside or outside
3724 a range, using a variable named like IN_P, and then describe the
3725 range with a lower and upper bound. If one of the bounds is omitted,
3726 it represents either the highest or lowest value of the type.
3728 In the comments below, we represent a range by two numbers in brackets
3729 preceded by a "+" to designate being inside that range, or a "-" to
3730 designate being outside that range, so the condition can be inverted by
3731 flipping the prefix. An omitted bound is represented by a "-". For
3732 example, "- [-, 10]" means being outside the range starting at the lowest
3733 possible value and ending at 10, in other words, being greater than 10.
3734 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
3737 We set up things so that the missing bounds are handled in a consistent
3738 manner so neither a missing bound nor "true" and "false" need to be
3739 handled using a special case. */
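/* Editor's illustration of the representation: the disjunction above is
   first seen as the four ranges + [2, 2], + [3, 3], + [4, 4] and
   + [5, 5]; merging adjacent ranges gives + [2, 5], and build_range_check
   then emits the single comparison

     (unsigned) (X - 2) <= 3

   which rebases the range at zero so that the wrap-around of unsigned
   arithmetic checks both bounds at once.  */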
3741 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3742 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3743 and UPPER1_P are nonzero if the respective argument is an upper bound
3744 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3745 must be specified for a comparison. ARG1 will be converted to ARG0's
3746 type if both are specified. */
3749 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3750 tree arg1, int upper1_p)
3756 /* If neither arg represents infinity, do the normal operation.
3757 Else, if not a comparison, return infinity. Else handle the special
3758 comparison rules. Note that most of the cases below won't occur, but
3759 are handled for consistency. */
3761 if (arg0 != 0 && arg1 != 0)
3763 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3764 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3766 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3769 if (TREE_CODE_CLASS (code) != tcc_comparison)
3772 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3773 for neither. In real maths, we cannot assume open ended ranges are
3774 the same. But, this is computer arithmetic, where numbers are finite.
3775 We can therefore make the transformation of any unbounded range with
3776 the value Z, Z being greater than any representable number. This permits
3777 us to treat unbounded ranges as equal. */
3778 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3779 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3783 result = sgn0 == sgn1;
3786 result = sgn0 != sgn1;
3789 result = sgn0 < sgn1;
3792 result = sgn0 <= sgn1;
3795 result = sgn0 > sgn1;
3798 result = sgn0 >= sgn1;
3804 return constant_boolean_node (result, type);
3807 /* Given EXP, a logical expression, set the range it is testing into
3808 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3809 actually being tested. *PLOW and *PHIGH will be made of the same
3810 type as the returned expression. If EXP is not a comparison, we
3811 will most likely not be returning a useful value and range. Set
3812 *STRICT_OVERFLOW_P to true if the return value is only valid
3813 because signed overflow is undefined; otherwise, do not change
3814 *STRICT_OVERFLOW_P. */
3817 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3818 bool *strict_overflow_p)
3820 enum tree_code code;
3821 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3822 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3824 tree low, high, n_low, n_high;
3825 location_t loc = EXPR_LOCATION (exp);
3827 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3828 and see if we can refine the range. Some of the cases below may not
3829 happen, but it doesn't seem worth worrying about this. We "continue"
3830 the outer loop when we've changed something; otherwise we "break"
3831 the switch, which will "break" the while. */
3834 low = high = build_int_cst (TREE_TYPE (exp), 0);
3838 code = TREE_CODE (exp);
3839 exp_type = TREE_TYPE (exp);
3841 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3843 if (TREE_OPERAND_LENGTH (exp) > 0)
3844 arg0 = TREE_OPERAND (exp, 0);
3845 if (TREE_CODE_CLASS (code) == tcc_comparison
3846 || TREE_CODE_CLASS (code) == tcc_unary
3847 || TREE_CODE_CLASS (code) == tcc_binary)
3848 arg0_type = TREE_TYPE (arg0);
3849 if (TREE_CODE_CLASS (code) == tcc_binary
3850 || TREE_CODE_CLASS (code) == tcc_comparison
3851 || (TREE_CODE_CLASS (code) == tcc_expression
3852 && TREE_OPERAND_LENGTH (exp) > 1))
3853 arg1 = TREE_OPERAND (exp, 1);
3858 case TRUTH_NOT_EXPR:
3859 in_p = ! in_p, exp = arg0;
3862 case EQ_EXPR: case NE_EXPR:
3863 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3864 /* We can only do something if the range is testing for zero
3865 and if the second operand is an integer constant. Note that
3866 saying something is "in" the range we make is done by
3867 complementing IN_P since it will be set in the initial case of
3868 being not equal to zero; "out" is leaving it alone. */
3869 if (low == 0 || high == 0
3870 || ! integer_zerop (low) || ! integer_zerop (high)
3871 || TREE_CODE (arg1) != INTEGER_CST)
3876 case NE_EXPR: /* - [c, c] */
3879 case EQ_EXPR: /* + [c, c] */
3880 in_p = ! in_p, low = high = arg1;
3882 case GT_EXPR: /* - [-, c] */
3883 low = 0, high = arg1;
3885 case GE_EXPR: /* + [c, -] */
3886 in_p = ! in_p, low = arg1, high = 0;
3888 case LT_EXPR: /* - [c, -] */
3889 low = arg1, high = 0;
3891 case LE_EXPR: /* + [-, c] */
3892 in_p = ! in_p, low = 0, high = arg1;
3898 /* If this is an unsigned comparison, we also know that EXP is
3899 greater than or equal to zero. We base the range tests we make
3900 on that fact, so we record it here so we can parse existing
3901 range tests. We test arg0_type since often the return type
3902 of, e.g. EQ_EXPR, is boolean. */
3903 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3905 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3907 build_int_cst (arg0_type, 0),
3911 in_p = n_in_p, low = n_low, high = n_high;
3913 /* If the high bound is missing, but we have a nonzero low
3914 bound, reverse the range so it goes from zero to the low bound
3916 if (high == 0 && low && ! integer_zerop (low))
3919 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3920 integer_one_node, 0);
3921 low = build_int_cst (arg0_type, 0);
3929 /* (-x) IN [a,b] -> x in [-b, -a] */
3930 n_low = range_binop (MINUS_EXPR, exp_type,
3931 build_int_cst (exp_type, 0),
3933 n_high = range_binop (MINUS_EXPR, exp_type,
3934 build_int_cst (exp_type, 0),
3936 if (n_high != 0 && TREE_OVERFLOW (n_high))
3942 exp = build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3943 build_int_cst (exp_type, 1));
3946 case PLUS_EXPR: case MINUS_EXPR:
3947 if (TREE_CODE (arg1) != INTEGER_CST)
3950 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3951 move a constant to the other side. */
3952 if (!TYPE_UNSIGNED (arg0_type)
3953 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3956 /* If EXP is signed, any overflow in the computation is undefined,
3957 so we don't worry about it so long as our computations on
3958 the bounds don't overflow. For unsigned, overflow is defined
3959 and this is exactly the right thing. */
3960 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3961 arg0_type, low, 0, arg1, 0);
3962 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3963 arg0_type, high, 1, arg1, 0);
3964 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3965 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3968 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3969 *strict_overflow_p = true;
3972 /* Check for an unsigned range which has wrapped around the maximum
3973 value thus making n_high < n_low, and normalize it. */
3974 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3976 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3977 integer_one_node, 0);
3978 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3979 integer_one_node, 0);
3981 /* If the range is of the form +/- [ x+1, x ], we won't
3982 be able to normalize it. But then, it represents the
3983 whole range or the empty set, so make it
3985 if (tree_int_cst_equal (n_low, low)
3986 && tree_int_cst_equal (n_high, high))
3992 low = n_low, high = n_high;
3997 CASE_CONVERT: case NON_LVALUE_EXPR:
3998 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4001 if (! INTEGRAL_TYPE_P (arg0_type)
4002 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4003 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4006 n_low = low, n_high = high;
4009 n_low = fold_convert_loc (loc, arg0_type, n_low);
4012 n_high = fold_convert_loc (loc, arg0_type, n_high);
4015 /* If we're converting arg0 from an unsigned type, to exp,
4016 a signed type, we will be doing the comparison as unsigned.
4017 The tests above have already verified that LOW and HIGH
4020 So we have to ensure that we will handle large unsigned
4021 values the same way that the current signed bounds treat
4024 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4028 /* For fixed-point modes, we need to pass the saturating flag
4029 as the 2nd parameter. */
4030 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4031 equiv_type = lang_hooks.types.type_for_mode
4032 (TYPE_MODE (arg0_type),
4033 TYPE_SATURATING (arg0_type));
4035 equiv_type = lang_hooks.types.type_for_mode
4036 (TYPE_MODE (arg0_type), 1);
4038 /* A range without an upper bound is, naturally, unbounded.
4039 Since convert would have cropped a very large value, use
4040 the max value for the destination type. */
4042 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4043 : TYPE_MAX_VALUE (arg0_type);
4045 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4046 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4047 fold_convert_loc (loc, arg0_type,
4049 build_int_cst (arg0_type, 1));
4051 /* If the low bound is specified, "and" the range with the
4052 range for which the original unsigned value will be
4056 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4057 1, n_low, n_high, 1,
4058 fold_convert_loc (loc, arg0_type,
4063 in_p = (n_in_p == in_p);
4067 /* Otherwise, "or" the range with the range of the input
4068 that will be interpreted as negative. */
4069 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4070 0, n_low, n_high, 1,
4071 fold_convert_loc (loc, arg0_type,
4076 in_p = (in_p != n_in_p);
4081 low = n_low, high = n_high;
4091 /* If EXP is a constant, we can evaluate whether this is true or false. */
4092 if (TREE_CODE (exp) == INTEGER_CST)
4094 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4096 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4102 *pin_p = in_p, *plow = low, *phigh = high;
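/* Example of the walk above (editor's note): for EXP = "x + 10 < 20"
   with signed x, the comparison case records the range - [20, -]
   (i.e. NOT in [20, +inf)), and the PLUS_EXPR case then moves the
   constant into the bounds, leaving - [10, -] on x itself, which is just
   x < 10.  Since that last step relies on signed overflow being
   undefined, *STRICT_OVERFLOW_P is set.  */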
4106 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4107 type, TYPE, return an expression to test if EXP is in (or out of, depending
4108 on IN_P) the range. Return 0 if the test couldn't be created. */
4111 build_range_check (location_t loc, tree type, tree exp, int in_p,
4112 tree low, tree high)
4114 tree etype = TREE_TYPE (exp), value;
4116 #ifdef HAVE_canonicalize_funcptr_for_compare
4117 /* Disable this optimization for function pointer expressions
4118 on targets that require function pointer canonicalization. */
4119 if (HAVE_canonicalize_funcptr_for_compare
4120 && TREE_CODE (etype) == POINTER_TYPE
4121 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4127 value = build_range_check (loc, type, exp, 1, low, high);
4129 return invert_truthvalue_loc (loc, value);
4134 if (low == 0 && high == 0)
4135 return build_int_cst (type, 1);
4138 return fold_build2_loc (loc, LE_EXPR, type, exp,
4139 fold_convert_loc (loc, etype, high));
4142 return fold_build2_loc (loc, GE_EXPR, type, exp,
4143 fold_convert_loc (loc, etype, low));
4145 if (operand_equal_p (low, high, 0))
4146 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4147 fold_convert_loc (loc, etype, low));
4149 if (integer_zerop (low))
4151 if (! TYPE_UNSIGNED (etype))
4153 etype = unsigned_type_for (etype);
4154 high = fold_convert_loc (loc, etype, high);
4155 exp = fold_convert_loc (loc, etype, exp);
4157 return build_range_check (loc, type, exp, 1, 0, high);
4160 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4161 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4163 unsigned HOST_WIDE_INT lo;
4167 prec = TYPE_PRECISION (etype);
4168 if (prec <= HOST_BITS_PER_WIDE_INT)
4171 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4175 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4176 lo = (unsigned HOST_WIDE_INT) -1;
4179 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4181 if (TYPE_UNSIGNED (etype))
4183 tree signed_etype = signed_type_for (etype);
4184 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4186 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4188 etype = signed_etype;
4189 exp = fold_convert_loc (loc, etype, exp);
4191 return fold_build2_loc (loc, GT_EXPR, type, exp,
4192 build_int_cst (etype, 0));
4196 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4197 This requires wrap-around arithmetic for the type of the expression.
4198 First make sure that arithmetic in this type is valid, then make sure
4199 that it wraps around. */
4200 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4201 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4202 TYPE_UNSIGNED (etype));
4204 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4206 tree utype, minv, maxv;
4208 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4209 for the type in question, as we rely on this here. */
4210 utype = unsigned_type_for (etype);
4211 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4212 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4213 integer_one_node, 1);
4214 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4216 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4223 high = fold_convert_loc (loc, etype, high);
4224 low = fold_convert_loc (loc, etype, low);
4225 exp = fold_convert_loc (loc, etype, exp);
4227 value = const_binop (MINUS_EXPR, high, low);
4230 if (POINTER_TYPE_P (etype))
4232 if (value != 0 && !TREE_OVERFLOW (value))
4234 low = fold_convert_loc (loc, sizetype, low);
4235 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4236 return build_range_check (loc, type,
4237 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4239 1, build_int_cst (etype, 0), value);
4244 if (value != 0 && !TREE_OVERFLOW (value))
4245 return build_range_check (loc, type,
4246 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4247 1, build_int_cst (etype, 0), value);
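/* Two concrete outputs (editor's note):

     x in [2, 5]                        ->  (unsigned) (x - 2) <= 3
     c in [1, 127], c an unsigned char  ->  (signed char) c > 0

   the first by rebasing the range at zero in an unsigned type, the
   second through the special case above that recognizes a range covering
   exactly the positive half of the corresponding signed type.  */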
4252 /* Return the predecessor of VAL in its type, handling the infinite case. */
4255 range_predecessor (tree val)
4257 tree type = TREE_TYPE (val);
4259 if (INTEGRAL_TYPE_P (type)
4260 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4263 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4266 /* Return the successor of VAL in its type, handling the infinite case. */
4269 range_successor (tree val)
4271 tree type = TREE_TYPE (val);
4273 if (INTEGRAL_TYPE_P (type)
4274 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4277 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4280 /* Given two ranges, see if we can merge them into one. Return 1 if we
4281 can, 0 if we can't. Set the output range into the specified parameters. */
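/* For instance (editor's note): merging + [2, -] with + [-, 5]
   (x >= 2 && x <= 5) falls into the "both in" case below and yields
   + [2, 5]; merging - [2, 2] with - [3, 3] (x != 2 && x != 3) uses the
   adjacency check in the "both out" case and yields - [2, 3].  */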
4284 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4285 tree high0, int in1_p, tree low1, tree high1)
4293 int lowequal = ((low0 == 0 && low1 == 0)
4294 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4295 low0, 0, low1, 0)));
4296 int highequal = ((high0 == 0 && high1 == 0)
4297 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4298 high0, 1, high1, 1)));
4300 /* Make range 0 be the range that starts first, or ends last if they
4301 start at the same value. Swap them if it isn't. */
4302 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4305 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4306 high1, 1, high0, 1))))
4308 temp = in0_p, in0_p = in1_p, in1_p = temp;
4309 tem = low0, low0 = low1, low1 = tem;
4310 tem = high0, high0 = high1, high1 = tem;
4313 /* Now flag two cases, whether the ranges are disjoint or whether the
4314 second range is totally subsumed in the first. Note that the tests
4315 below are simplified by the ones above. */
4316 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4317 high0, 1, low1, 0));
4318 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4319 high1, 1, high0, 1));
4321 /* We now have four cases, depending on whether we are including or
4322 excluding the two ranges. */
4325 /* If they don't overlap, the result is false. If the second range
4326 is a subset it is the result. Otherwise, the range is from the start
4327 of the second to the end of the first. */
4329 in_p = 0, low = high = 0;
4331 in_p = 1, low = low1, high = high1;
4333 in_p = 1, low = low1, high = high0;
4336 else if (in0_p && ! in1_p)
4338 /* If they don't overlap, the result is the first range. If they are
4339 equal, the result is false. If the second range is a subset of the
4340 first, and the ranges begin at the same place, we go from just after
4341 the end of the second range to the end of the first. If the second
4342 range is not a subset of the first, or if it is a subset and both
4343 ranges end at the same place, the range starts at the start of the
4344 first range and ends just before the second range.
4345 Otherwise, we can't describe this as a single range. */
4347 in_p = 1, low = low0, high = high0;
4348 else if (lowequal && highequal)
4349 in_p = 0, low = high = 0;
4350 else if (subset && lowequal)
4352 low = range_successor (high1);
4357 /* We are in the weird situation where high0 > high1 but
4358 high1 has no successor. Punt. */
4362 else if (! subset || highequal)
4365 high = range_predecessor (low1);
4369 /* low0 < low1 but low1 has no predecessor. Punt. */
4377 else if (! in0_p && in1_p)
4379 /* If they don't overlap, the result is the second range. If the second
4380 is a subset of the first, the result is false. Otherwise,
4381 the range starts just after the first range and ends at the
4382 end of the second. */
4384 in_p = 1, low = low1, high = high1;
4385 else if (subset || highequal)
4386 in_p = 0, low = high = 0;
4389 low = range_successor (high0);
4394 /* high1 > high0 but high0 has no successor. Punt. */
4402 /* The case where we are excluding both ranges. Here the complex case
4403 is if they don't overlap. In that case, the only time we have a
4404 range is if they are adjacent. If the second is a subset of the
4405 first, the result is the first. Otherwise, the range to exclude
4406 starts at the beginning of the first range and ends at the end of the second. */
4410 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4411 range_successor (high0),
4413 in_p = 0, low = low0, high = high1;
4416 /* Canonicalize - [min, x] into - [-, x]. */
4417 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4418 switch (TREE_CODE (TREE_TYPE (low0)))
4421 if (TYPE_PRECISION (TREE_TYPE (low0))
4422 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4426 if (tree_int_cst_equal (low0,
4427 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4431 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4432 && integer_zerop (low0))
4439 /* Canonicalize - [x, max] into - [x, -]. */
4440 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4441 switch (TREE_CODE (TREE_TYPE (high1)))
4444 if (TYPE_PRECISION (TREE_TYPE (high1))
4445 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4449 if (tree_int_cst_equal (high1,
4450 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4454 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4455 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4457 integer_one_node, 1)))
4464 /* The ranges might also be adjacent between the maximum and
4465 minimum values of the given type. For
4466 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4467 return + [x + 1, y - 1]. */
4468 if (low0 == 0 && high1 == 0)
4470 low = range_successor (high0);
4471 high = range_predecessor (low1);
4472 if (low == 0 || high == 0)
4482 in_p = 0, low = low0, high = high0;
4484 in_p = 0, low = low0, high = high1;
4487 *pin_p = in_p, *plow = low, *phigh = high;
4492 /* Subroutine of fold, looking inside expressions of the form
4493 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4494 of the COND_EXPR. This function is being used also to optimize
4495 A op B ? C : A, by reversing the comparison first.
4497 Return a folded expression whose code is not a COND_EXPR
4498 anymore, or NULL_TREE if no folding opportunity is found. */
4501 fold_cond_expr_with_comparison (location_t loc, tree type,
4502 tree arg0, tree arg1, tree arg2)
4504 enum tree_code comp_code = TREE_CODE (arg0);
4505 tree arg00 = TREE_OPERAND (arg0, 0);
4506 tree arg01 = TREE_OPERAND (arg0, 1);
4507 tree arg1_type = TREE_TYPE (arg1);
4513 /* If we have A op 0 ? A : -A, consider applying the following transformations:
4516 A == 0? A : -A same as -A
4517 A != 0? A : -A same as A
4518 A >= 0? A : -A same as abs (A)
4519 A > 0? A : -A same as abs (A)
4520 A <= 0? A : -A same as -abs (A)
4521 A < 0? A : -A same as -abs (A)
4523 None of these transformations work for modes with signed
4524 zeros. If A is +/-0, the first two transformations will
4525 change the sign of the result (from +0 to -0, or vice
4526 versa). The last four will fix the sign of the result,
4527 even though the original expressions could be positive or
4528 negative, depending on the sign of A.
4530 Note that all these transformations are correct if A is
4531 NaN, since the two alternatives (A and -A) are also NaNs. */
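/* For instance, when signed zeros need not be honored, x > 0.0 ? x : -x
   can be folded to ABS_EXPR <x> and x < 0.0 ? x : -x to -ABS_EXPR <x>,
   as listed in the table above.  */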
4532 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4533 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4534 ? real_zerop (arg01)
4535 : integer_zerop (arg01))
4536 && ((TREE_CODE (arg2) == NEGATE_EXPR
4537 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4538 /* In the case that A is of the form X-Y, '-A' (arg2) may
4539 have already been folded to Y-X, check for that. */
4540 || (TREE_CODE (arg1) == MINUS_EXPR
4541 && TREE_CODE (arg2) == MINUS_EXPR
4542 && operand_equal_p (TREE_OPERAND (arg1, 0),
4543 TREE_OPERAND (arg2, 1), 0)
4544 && operand_equal_p (TREE_OPERAND (arg1, 1),
4545 TREE_OPERAND (arg2, 0), 0))))
4550 tem = fold_convert_loc (loc, arg1_type, arg1);
4551 return pedantic_non_lvalue_loc (loc,
4552 fold_convert_loc (loc, type,
4553 negate_expr (tem)));
4556 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4559 if (flag_trapping_math)
4564 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4565 arg1 = fold_convert_loc (loc, signed_type_for
4566 (TREE_TYPE (arg1)), arg1);
4567 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4568 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4571 if (flag_trapping_math)
4575 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4576 arg1 = fold_convert_loc (loc, signed_type_for
4577 (TREE_TYPE (arg1)), arg1);
4578 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4579 return negate_expr (fold_convert_loc (loc, type, tem));
4581 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4585 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4586 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4587 both transformations are correct when A is NaN: A != 0
4588 is then true, and A == 0 is false. */
4590 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4591 && integer_zerop (arg01) && integer_zerop (arg2))
4593 if (comp_code == NE_EXPR)
4594 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4595 else if (comp_code == EQ_EXPR)
4596 return build_int_cst (type, 0);
4599 /* Try some transformations of A op B ? A : B.
4601 A == B? A : B same as B
4602 A != B? A : B same as A
4603 A >= B? A : B same as max (A, B)
4604 A > B? A : B same as max (B, A)
4605 A <= B? A : B same as min (A, B)
4606 A < B? A : B same as min (B, A)
4608 As above, these transformations don't work in the presence
4609 of signed zeros. For example, if A and B are zeros of
4610 opposite sign, the first two transformations will change
4611 the sign of the result. In the last four, the original
4612 expressions give different results for (A=+0, B=-0) and
4613 (A=-0, B=+0), but the transformed expressions do not.
4615 The first two transformations are correct if either A or B
4616 is a NaN. In the first transformation, the condition will
4617 be false, and B will indeed be chosen. In the case of the
4618 second transformation, the condition A != B will be true,
4619 and A will be chosen.
4621 The conversions to max() and min() are not correct if B is
4622 a number and A is not. The conditions in the original
4623 expressions will be false, so all four give B. The min()
4624 and max() versions would give a NaN instead. */
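/* For instance, when neither NaNs nor signed zeros need to be honored,
   x <= y ? x : y can be folded to MIN_EXPR <x, y> and x > y ? x : y to
   MAX_EXPR <y, x>, as listed above.  */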
4625 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4626 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4627 /* Avoid these transformations if the COND_EXPR may be used
4628 as an lvalue in the C++ front-end. PR c++/19199. */
4630 || (strcmp (lang_hooks.name, "GNU C++") != 0
4631 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4632 || ! maybe_lvalue_p (arg1)
4633 || ! maybe_lvalue_p (arg2)))
4635 tree comp_op0 = arg00;
4636 tree comp_op1 = arg01;
4637 tree comp_type = TREE_TYPE (comp_op0);
4639 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4640 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4650 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4652 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4657 /* In C++ a ?: expression can be an lvalue, so put the
4658 operand which will be used if they are equal first
4659 so that we can convert this back to the
4660 corresponding COND_EXPR. */
4661 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4663 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4664 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4665 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4666 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4667 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4668 comp_op1, comp_op0);
4669 return pedantic_non_lvalue_loc (loc,
4670 fold_convert_loc (loc, type, tem));
4677 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4679 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4680 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4681 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4682 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4683 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4684 comp_op1, comp_op0);
4685 return pedantic_non_lvalue_loc (loc,
4686 fold_convert_loc (loc, type, tem));
4690 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4691 return pedantic_non_lvalue_loc (loc,
4692 fold_convert_loc (loc, type, arg2));
4695 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4696 return pedantic_non_lvalue_loc (loc,
4697 fold_convert_loc (loc, type, arg1));
4700 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4705 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4706 we might still be able to simplify this. For example,
4707 if C1 is one less or one more than C2, this might have started
4708 out as a MIN or MAX and been transformed by this function.
4709 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
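/* For example, with integer X the expression X < 4 ? X : 3 has C1 == C2 + 1
   and is really MIN (X, 3), while X > 5 ? X : 6 has C1 == C2 - 1 and is
   really MAX (X, 6).  */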
4711 if (INTEGRAL_TYPE_P (type)
4712 && TREE_CODE (arg01) == INTEGER_CST
4713 && TREE_CODE (arg2) == INTEGER_CST)
4717 if (TREE_CODE (arg1) == INTEGER_CST)
4719 /* We can replace A with C1 in this case. */
4720 arg1 = fold_convert_loc (loc, type, arg01);
4721 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4724 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4725 MIN_EXPR, to preserve the signedness of the comparison. */
4726 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4728 && operand_equal_p (arg01,
4729 const_binop (PLUS_EXPR, arg2,
4730 build_int_cst (type, 1)),
4733 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4734 fold_convert_loc (loc, TREE_TYPE (arg00),
4736 return pedantic_non_lvalue_loc (loc,
4737 fold_convert_loc (loc, type, tem));
4742 /* If C1 is C2 - 1, this is min(A, C2), with the same care as above. */
4744 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4746 && operand_equal_p (arg01,
4747 const_binop (MINUS_EXPR, arg2,
4748 build_int_cst (type, 1)),
4751 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4752 fold_convert_loc (loc, TREE_TYPE (arg00),
4754 return pedantic_non_lvalue_loc (loc,
4755 fold_convert_loc (loc, type, tem));
4760 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4761 MAX_EXPR, to preserve the signedness of the comparison. */
4762 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4764 && operand_equal_p (arg01,
4765 const_binop (MINUS_EXPR, arg2,
4766 build_int_cst (type, 1)),
4769 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4770 fold_convert_loc (loc, TREE_TYPE (arg00),
4772 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4777 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4778 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4780 && operand_equal_p (arg01,
4781 const_binop (PLUS_EXPR, arg2,
4782 build_int_cst (type, 1)),
4785 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4786 fold_convert_loc (loc, TREE_TYPE (arg00),
4788 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4802 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4803 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4804 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4808 /* EXP is some logical combination of boolean tests. See if we can
4809 merge it into some range test. Return the new tree if so. */
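/* For example, X == 0 || X == 1 gives the two single-element ranges [0, 0]
   and [1, 1]; they are adjacent, so the whole expression can be folded to a
   single range check such as (unsigned) X <= 1.  */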
4812 fold_range_test (location_t loc, enum tree_code code, tree type,
4815 int or_op = (code == TRUTH_ORIF_EXPR
4816 || code == TRUTH_OR_EXPR);
4817 int in0_p, in1_p, in_p;
4818 tree low0, low1, low, high0, high1, high;
4819 bool strict_overflow_p = false;
4820 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4821 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4823 const char * const warnmsg = G_("assuming signed overflow does not occur "
4824 "when simplifying range test");
4826 /* If this is an OR operation, invert both sides; we will invert
4827 again at the end. */
4829 in0_p = ! in0_p, in1_p = ! in1_p;
4831 /* If both expressions are the same, if we can merge the ranges, and we
4832 can build the range test, return it or its inverse. If one of the
4833 ranges is always true or always false, consider it to be the same
4834 expression as the other. */
4835 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4836 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4838 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
4840 : rhs != 0 ? rhs : integer_zero_node,
4843 if (strict_overflow_p)
4844 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4845 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4848 /* On machines where branches are expensive, if this is a
4849 short-circuited branch and the underlying object on both sides
4850 is the same, make a non-short-circuit operation. */
4851 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4852 && lhs != 0 && rhs != 0
4853 && (code == TRUTH_ANDIF_EXPR
4854 || code == TRUTH_ORIF_EXPR)
4855 && operand_equal_p (lhs, rhs, 0))
4857 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4858 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4859 which cases we can't do this. */
4860 if (simple_operand_p (lhs))
4861 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4862 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4865 else if (lang_hooks.decls.global_bindings_p () == 0
4866 && ! CONTAINS_PLACEHOLDER_P (lhs))
4868 tree common = save_expr (lhs);
4870 if (0 != (lhs = build_range_check (loc, type, common,
4871 or_op ? ! in0_p : in0_p,
4873 && (0 != (rhs = build_range_check (loc, type, common,
4874 or_op ? ! in1_p : in1_p,
4877 if (strict_overflow_p)
4878 fold_overflow_warning (warnmsg,
4879 WARN_STRICT_OVERFLOW_COMPARISON);
4880 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4881 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4890 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4891 bit value. Arrange things so the extra bits will be set to zero if and
4892 only if C is sign-extended to its full width. If MASK is nonzero,
4893 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4896 unextend (tree c, int p, int unsignedp, tree mask)
4898 tree type = TREE_TYPE (c);
4899 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4902 if (p == modesize || unsignedp)
4905 /* We work by getting just the sign bit into the low-order bit, then
4906 into the high-order bit, then sign-extend. We then XOR that value with C. */
4908 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4909 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4911 /* We must use a signed type in order to get an arithmetic right shift.
4912 However, we must also avoid introducing accidental overflows, so that
4913 a subsequent call to integer_zerop will work. Hence we must
4914 do the type conversion here. At this point, the constant is either
4915 zero or one, and the conversion to a signed type can never overflow.
4916 We could get an overflow if this conversion is done anywhere else. */
4917 if (TYPE_UNSIGNED (type))
4918 temp = fold_convert (signed_type_for (type), temp);
4920 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4921 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4923 temp = const_binop (BIT_AND_EXPR, temp,
4924 fold_convert (TREE_TYPE (c), mask));
4925 /* If necessary, convert the type back to match the type of C. */
4926 if (TYPE_UNSIGNED (type))
4927 temp = fold_convert (type, temp);
4929 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4932 /* For an expression that has the form (A && B) || ~B or (A || B) && ~B,
4936 we can drop one of the inner expressions and simplify to A || ~B or A && ~B, respectively.
4940 LOC is the location of the resulting expression. OP is the inner
4941 logical operation; the left-hand side in the examples above, while CMPOP
4942 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4943 removing a condition that guards another, as in
4944 (A != NULL && A->...) || A == NULL
4945 which we must not transform. If RHS_ONLY is true, only eliminate the
4946 right-most operand of the inner logical operation. */
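/* For example, in (X > 0 && Y != 5) || Y == 5 the operand Y != 5 of the
   inner AND is the inverse of the comparison Y == 5, so it can be dropped,
   giving (X > 0) || Y == 5.  */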
4949 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4952 tree type = TREE_TYPE (cmpop);
4953 enum tree_code code = TREE_CODE (cmpop);
4954 enum tree_code truthop_code = TREE_CODE (op);
4955 tree lhs = TREE_OPERAND (op, 0);
4956 tree rhs = TREE_OPERAND (op, 1);
4957 tree orig_lhs = lhs, orig_rhs = rhs;
4958 enum tree_code rhs_code = TREE_CODE (rhs);
4959 enum tree_code lhs_code = TREE_CODE (lhs);
4960 enum tree_code inv_code;
4962 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4965 if (TREE_CODE_CLASS (code) != tcc_comparison)
4968 if (rhs_code == truthop_code)
4970 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4971 if (newrhs != NULL_TREE)
4974 rhs_code = TREE_CODE (rhs);
4977 if (lhs_code == truthop_code && !rhs_only)
4979 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4980 if (newlhs != NULL_TREE)
4983 lhs_code = TREE_CODE (lhs);
4987 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
4988 if (inv_code == rhs_code
4989 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
4990 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
4992 if (!rhs_only && inv_code == lhs_code
4993 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
4994 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
4996 if (rhs != orig_rhs || lhs != orig_lhs)
4997 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5002 /* Find ways of folding logical expressions of LHS and RHS:
5003 Try to merge two comparisons to the same innermost item.
5004 Look for range tests like "ch >= '0' && ch <= '9'".
5005 Look for combinations of simple terms on machines with expensive branches
5006 and evaluate the RHS unconditionally.
5008 For example, if we have p->a == 2 && p->b == 4 and we can make an
5009 object large enough to span both A and B, we can do this with a comparison
5010 against the object ANDed with the a mask.
5012 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5013 operations to do this with one comparison.
5015 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5016 function and the one above.
5018 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5019 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5021 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its two operands.
5024 We return the simplified tree or 0 if no optimization is possible. */
5027 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5030 /* If this is the "or" of two comparisons, we can do something if
5031 the comparisons are NE_EXPR. If this is the "and", we can do something
5032 if the comparisons are EQ_EXPR. I.e.,
5033 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5035 WANTED_CODE is this operation code. For single bit fields, we can
5036 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5037 comparison for one-bit fields. */
5039 enum tree_code wanted_code;
5040 enum tree_code lcode, rcode;
5041 tree ll_arg, lr_arg, rl_arg, rr_arg;
5042 tree ll_inner, lr_inner, rl_inner, rr_inner;
5043 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5044 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5045 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5046 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5047 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5048 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5049 enum machine_mode lnmode, rnmode;
5050 tree ll_mask, lr_mask, rl_mask, rr_mask;
5051 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5052 tree l_const, r_const;
5053 tree lntype, rntype, result;
5054 HOST_WIDE_INT first_bit, end_bit;
5056 tree orig_lhs = lhs, orig_rhs = rhs;
5057 enum tree_code orig_code = code;
5059 /* Start by getting the comparison codes. Fail if anything is volatile.
5060 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5061 it were surrounded with a NE_EXPR. */
5063 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5066 lcode = TREE_CODE (lhs);
5067 rcode = TREE_CODE (rhs);
5069 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5071 lhs = build2 (NE_EXPR, truth_type, lhs,
5072 build_int_cst (TREE_TYPE (lhs), 0));
5076 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5078 rhs = build2 (NE_EXPR, truth_type, rhs,
5079 build_int_cst (TREE_TYPE (rhs), 0));
5083 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5084 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5087 ll_arg = TREE_OPERAND (lhs, 0);
5088 lr_arg = TREE_OPERAND (lhs, 1);
5089 rl_arg = TREE_OPERAND (rhs, 0);
5090 rr_arg = TREE_OPERAND (rhs, 1);
5092 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5093 if (simple_operand_p (ll_arg)
5094 && simple_operand_p (lr_arg))
5096 if (operand_equal_p (ll_arg, rl_arg, 0)
5097 && operand_equal_p (lr_arg, rr_arg, 0))
5099 result = combine_comparisons (loc, code, lcode, rcode,
5100 truth_type, ll_arg, lr_arg);
5104 else if (operand_equal_p (ll_arg, rr_arg, 0)
5105 && operand_equal_p (lr_arg, rl_arg, 0))
5107 result = combine_comparisons (loc, code, lcode,
5108 swap_tree_comparison (rcode),
5109 truth_type, ll_arg, lr_arg);
5115 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5116 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5118 /* If the RHS can be evaluated unconditionally and its operands are
5119 simple, it wins to evaluate the RHS unconditionally on machines
5120 with expensive branches. In this case, this isn't a comparison
5121 that can be merged. Avoid doing this if the RHS is a floating-point
5122 comparison since those can trap. */
5124 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5126 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5127 && simple_operand_p (rl_arg)
5128 && simple_operand_p (rr_arg))
5130 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5131 if (code == TRUTH_OR_EXPR
5132 && lcode == NE_EXPR && integer_zerop (lr_arg)
5133 && rcode == NE_EXPR && integer_zerop (rr_arg)
5134 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5135 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5136 return build2_loc (loc, NE_EXPR, truth_type,
5137 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5139 build_int_cst (TREE_TYPE (ll_arg), 0));
5141 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5142 if (code == TRUTH_AND_EXPR
5143 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5144 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5145 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5146 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5147 return build2_loc (loc, EQ_EXPR, truth_type,
5148 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5150 build_int_cst (TREE_TYPE (ll_arg), 0));
5152 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5154 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5155 return build2_loc (loc, code, truth_type, lhs, rhs);
5160 /* See if the comparisons can be merged. Then get all the parameters for each side. */
5163 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5164 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5168 ll_inner = decode_field_reference (loc, ll_arg,
5169 &ll_bitsize, &ll_bitpos, &ll_mode,
5170 &ll_unsignedp, &volatilep, &ll_mask,
5172 lr_inner = decode_field_reference (loc, lr_arg,
5173 &lr_bitsize, &lr_bitpos, &lr_mode,
5174 &lr_unsignedp, &volatilep, &lr_mask,
5176 rl_inner = decode_field_reference (loc, rl_arg,
5177 &rl_bitsize, &rl_bitpos, &rl_mode,
5178 &rl_unsignedp, &volatilep, &rl_mask,
5180 rr_inner = decode_field_reference (loc, rr_arg,
5181 &rr_bitsize, &rr_bitpos, &rr_mode,
5182 &rr_unsignedp, &volatilep, &rr_mask,
5185 /* It must be true that the inner operation on the lhs of each
5186 comparison must be the same if we are to be able to do anything.
5187 Then see if we have constants. If not, the same must be true for the rhs. */
5189 if (volatilep || ll_inner == 0 || rl_inner == 0
5190 || ! operand_equal_p (ll_inner, rl_inner, 0))
5193 if (TREE_CODE (lr_arg) == INTEGER_CST
5194 && TREE_CODE (rr_arg) == INTEGER_CST)
5195 l_const = lr_arg, r_const = rr_arg;
5196 else if (lr_inner == 0 || rr_inner == 0
5197 || ! operand_equal_p (lr_inner, rr_inner, 0))
5200 l_const = r_const = 0;
5202 /* If either comparison code is not correct for our logical operation,
5203 fail. However, we can convert a one-bit comparison against zero into
5204 the opposite comparison against that bit being set in the field. */
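/* For example, (X & 8) != 0 is equivalent to (X & 8) == 8, so a "wrong"
   NE_EXPR comparison can be rewritten as the EQ_EXPR needed for the AND
   case, and vice versa for the OR case.  */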
5206 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5207 if (lcode != wanted_code)
5209 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5211 /* Make the left operand unsigned, since we are only interested
5212 in the value of one bit. Otherwise we are doing the wrong thing below. */
5221 /* This is analogous to the code for l_const above. */
5222 if (rcode != wanted_code)
5224 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5233 /* See if we can find a mode that contains both fields being compared on
5234 the left. If we can't, fail. Otherwise, update all constants and masks
5235 to be relative to a field of that size. */
5236 first_bit = MIN (ll_bitpos, rl_bitpos);
5237 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5238 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5239 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5241 if (lnmode == VOIDmode)
5244 lnbitsize = GET_MODE_BITSIZE (lnmode);
5245 lnbitpos = first_bit & ~ (lnbitsize - 1);
5246 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5247 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5249 if (BYTES_BIG_ENDIAN)
5251 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5252 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5255 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5256 size_int (xll_bitpos));
5257 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5258 size_int (xrl_bitpos));
5262 l_const = fold_convert_loc (loc, lntype, l_const);
5263 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5264 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5265 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5266 fold_build1_loc (loc, BIT_NOT_EXPR,
5269 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5271 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5276 r_const = fold_convert_loc (loc, lntype, r_const);
5277 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5278 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5279 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5280 fold_build1_loc (loc, BIT_NOT_EXPR,
5283 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5285 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5289 /* If the right sides are not constant, do the same for them. Also,
5290 disallow this optimization if a size or signedness mismatch occurs
5291 between the left and right sides. */
5294 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5295 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5296 /* Make sure the two fields on the right
5297 correspond to the left without being swapped. */
5298 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5301 first_bit = MIN (lr_bitpos, rr_bitpos);
5302 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5303 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5304 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5306 if (rnmode == VOIDmode)
5309 rnbitsize = GET_MODE_BITSIZE (rnmode);
5310 rnbitpos = first_bit & ~ (rnbitsize - 1);
5311 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5312 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5314 if (BYTES_BIG_ENDIAN)
5316 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5317 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5320 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5322 size_int (xlr_bitpos));
5323 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5325 size_int (xrr_bitpos));
5327 /* Make a mask that corresponds to both fields being compared.
5328 Do this for both items being compared. If the operands are the
5329 same size and the bits being compared are in the same position
5330 then we can do this by masking both and comparing the masked results. */
5332 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5333 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5334 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5336 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5337 ll_unsignedp || rl_unsignedp);
5338 if (! all_ones_mask_p (ll_mask, lnbitsize))
5339 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5341 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5342 lr_unsignedp || rr_unsignedp);
5343 if (! all_ones_mask_p (lr_mask, rnbitsize))
5344 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5346 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5349 /* There is still another way we can do something: If both pairs of
5350 fields being compared are adjacent, we may be able to make a wider
5351 field containing them both.
5353 Note that we still must mask the lhs/rhs expressions. Furthermore,
5354 the mask must be shifted to account for the shift done by
5355 make_bit_field_ref. */
5356 if ((ll_bitsize + ll_bitpos == rl_bitpos
5357 && lr_bitsize + lr_bitpos == rr_bitpos)
5358 || (ll_bitpos == rl_bitpos + rl_bitsize
5359 && lr_bitpos == rr_bitpos + rr_bitsize))
5363 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5364 ll_bitsize + rl_bitsize,
5365 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5366 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5367 lr_bitsize + rr_bitsize,
5368 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5370 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5371 size_int (MIN (xll_bitpos, xrl_bitpos)));
5372 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5373 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5375 /* Convert to the smaller type before masking out unwanted bits. */
5377 if (lntype != rntype)
5379 if (lnbitsize > rnbitsize)
5381 lhs = fold_convert_loc (loc, rntype, lhs);
5382 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5385 else if (lnbitsize < rnbitsize)
5387 rhs = fold_convert_loc (loc, lntype, rhs);
5388 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5393 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5394 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5396 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5397 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5399 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5405 /* Handle the case of comparisons with constants. If there is something in
5406 common between the masks, those bits of the constants must be the same.
5407 If not, the condition is always false. Test for this to avoid generating
5408 incorrect code below. */
5409 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5410 if (! integer_zerop (result)
5411 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5412 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5414 if (wanted_code == NE_EXPR)
5416 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5417 return constant_boolean_node (true, truth_type);
5421 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5422 return constant_boolean_node (false, truth_type);
5426 /* Construct the expression we will return. First get the component
5427 reference we will make. Unless the mask is all ones the width of
5428 that field, perform the mask operation. Then compare with the merged constant. */
5430 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5431 ll_unsignedp || rl_unsignedp);
5433 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5434 if (! all_ones_mask_p (ll_mask, lnbitsize))
5435 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5437 return build2_loc (loc, wanted_code, truth_type, result,
5438 const_binop (BIT_IOR_EXPR, l_const, r_const));
5441 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a constant. */
5445 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5449 enum tree_code op_code;
5452 int consts_equal, consts_lt;
5455 STRIP_SIGN_NOPS (arg0);
5457 op_code = TREE_CODE (arg0);
5458 minmax_const = TREE_OPERAND (arg0, 1);
5459 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5460 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5461 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5462 inner = TREE_OPERAND (arg0, 0);
5464 /* If something does not permit us to optimize, return the original tree. */
5465 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5466 || TREE_CODE (comp_const) != INTEGER_CST
5467 || TREE_OVERFLOW (comp_const)
5468 || TREE_CODE (minmax_const) != INTEGER_CST
5469 || TREE_OVERFLOW (minmax_const))
5472 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5473 and GT_EXPR, doing the rest with recursive calls using logical simplifications. */
5477 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5480 = optimize_minmax_comparison (loc,
5481 invert_tree_comparison (code, false),
5484 return invert_truthvalue_loc (loc, tem);
5490 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5491 optimize_minmax_comparison
5492 (loc, EQ_EXPR, type, arg0, comp_const),
5493 optimize_minmax_comparison
5494 (loc, GT_EXPR, type, arg0, comp_const));
5497 if (op_code == MAX_EXPR && consts_equal)
5498 /* MAX (X, 0) == 0 -> X <= 0 */
5499 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5501 else if (op_code == MAX_EXPR && consts_lt)
5502 /* MAX (X, 0) == 5 -> X == 5 */
5503 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5505 else if (op_code == MAX_EXPR)
5506 /* MAX (X, 0) == -1 -> false */
5507 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5509 else if (consts_equal)
5510 /* MIN (X, 0) == 0 -> X >= 0 */
5511 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5514 /* MIN (X, 0) == 5 -> false */
5515 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5518 /* MIN (X, 0) == -1 -> X == -1 */
5519 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5522 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5523 /* MAX (X, 0) > 0 -> X > 0
5524 MAX (X, 0) > 5 -> X > 5 */
5525 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5527 else if (op_code == MAX_EXPR)
5528 /* MAX (X, 0) > -1 -> true */
5529 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5531 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5532 /* MIN (X, 0) > 0 -> false
5533 MIN (X, 0) > 5 -> false */
5534 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5537 /* MIN (X, 0) > -1 -> X > -1 */
5538 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5545 /* T is an integer expression that is being multiplied, divided, or taken a
5546 modulus (CODE says which and what kind of divide or modulus) by a
5547 constant C. See if we can eliminate that operation by folding it with
5548 other operations already in T. WIDE_TYPE, if non-null, is a type that
5549 should be used for the computation if wider than our type.
5551 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5552 (X * 2) + (Y * 4). We must, however, be assured that either the original
5553 expression would not overflow or that overflow is undefined for the type
5554 in the language in question.
5556 If we return a non-null expression, it is an equivalent form of the
5557 original computation, but need not be in the original type.
5559 We set *STRICT_OVERFLOW_P to true if the return value depends on
5560 signed overflow being undefined. Otherwise we do not change
5561 *STRICT_OVERFLOW_P. */
5564 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5565 bool *strict_overflow_p)
5567 /* To avoid exponential search depth, refuse to allow recursion past
5568 three levels. Beyond that (1) it's highly unlikely that we'll find
5569 something interesting and (2) we've probably processed it before
5570 when we built the inner expression. */
5579 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5586 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5587 bool *strict_overflow_p)
5589 tree type = TREE_TYPE (t);
5590 enum tree_code tcode = TREE_CODE (t);
5591 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5592 > GET_MODE_SIZE (TYPE_MODE (type)))
5593 ? wide_type : type);
5595 int same_p = tcode == code;
5596 tree op0 = NULL_TREE, op1 = NULL_TREE;
5597 bool sub_strict_overflow_p;
5599 /* Don't deal with constants of zero here; they confuse the code below. */
5600 if (integer_zerop (c))
5603 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5604 op0 = TREE_OPERAND (t, 0);
5606 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5607 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5609 /* Note that we need not handle conditional operations here since fold
5610 already handles those cases. So just do arithmetic here. */
5614 /* For a constant, we can always simplify if we are a multiply
5615 or (for divide and modulus) if it is a multiple of our constant. */
5616 if (code == MULT_EXPR
5617 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5618 return const_binop (code, fold_convert (ctype, t),
5619 fold_convert (ctype, c));
5622 CASE_CONVERT: case NON_LVALUE_EXPR:
5623 /* If op0 is an expression ... */
5624 if ((COMPARISON_CLASS_P (op0)
5625 || UNARY_CLASS_P (op0)
5626 || BINARY_CLASS_P (op0)
5627 || VL_EXP_CLASS_P (op0)
5628 || EXPRESSION_CLASS_P (op0))
5629 /* ... and has wrapping overflow, and its type is smaller
5630 than ctype, then we cannot pass through as widening. */
5631 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5632 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5633 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5634 && (TYPE_PRECISION (ctype)
5635 > TYPE_PRECISION (TREE_TYPE (op0))))
5636 /* ... or this is a truncation (t is narrower than op0),
5637 then we cannot pass through this narrowing. */
5638 || (TYPE_PRECISION (type)
5639 < TYPE_PRECISION (TREE_TYPE (op0)))
5640 /* ... or signedness changes for division or modulus,
5641 then we cannot pass through this conversion. */
5642 || (code != MULT_EXPR
5643 && (TYPE_UNSIGNED (ctype)
5644 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5645 /* ... or has undefined overflow while the type it is converted to
5646 has not, we cannot do the operation in the inner type
5647 as that would introduce undefined overflow. */
5648 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5649 && !TYPE_OVERFLOW_UNDEFINED (type))))
5652 /* Pass the constant down and see if we can make a simplification. If
5653 we can, replace this expression with the inner simplification for
5654 possible later conversion to our or some other type. */
5655 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5656 && TREE_CODE (t2) == INTEGER_CST
5657 && !TREE_OVERFLOW (t2)
5658 && (0 != (t1 = extract_muldiv (op0, t2, code,
5660 ? ctype : NULL_TREE,
5661 strict_overflow_p))))
5666 /* If widening the type changes it from signed to unsigned, then we
5667 must avoid building ABS_EXPR itself as unsigned. */
5668 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5670 tree cstype = (*signed_type_for) (ctype);
5671 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5674 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5675 return fold_convert (ctype, t1);
5679 /* If the constant is negative, we cannot simplify this. */
5680 if (tree_int_cst_sgn (c) == -1)
5684 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5686 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5689 case MIN_EXPR: case MAX_EXPR:
5690 /* If widening the type changes the signedness, then we can't perform
5691 this optimization as that changes the result. */
5692 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5695 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5696 sub_strict_overflow_p = false;
5697 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5698 &sub_strict_overflow_p)) != 0
5699 && (t2 = extract_muldiv (op1, c, code, wide_type,
5700 &sub_strict_overflow_p)) != 0)
5702 if (tree_int_cst_sgn (c) < 0)
5703 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5704 if (sub_strict_overflow_p)
5705 *strict_overflow_p = true;
5706 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5707 fold_convert (ctype, t2));
5711 case LSHIFT_EXPR: case RSHIFT_EXPR:
5712 /* If the second operand is constant, this is a multiplication
5713 or floor division, by a power of two, so we can treat it that
5714 way unless the multiplier or divisor overflows. Signed
5715 left-shift overflow is implementation-defined rather than
5716 undefined in C90, so do not convert signed left shift into multiplication. */
5718 if (TREE_CODE (op1) == INTEGER_CST
5719 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5720 /* const_binop may not detect overflow correctly,
5721 so check for it explicitly here. */
5722 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5723 && TREE_INT_CST_HIGH (op1) == 0
5724 && 0 != (t1 = fold_convert (ctype,
5725 const_binop (LSHIFT_EXPR,
5728 && !TREE_OVERFLOW (t1))
5729 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5730 ? MULT_EXPR : FLOOR_DIV_EXPR,
5732 fold_convert (ctype, op0),
5734 c, code, wide_type, strict_overflow_p);
5737 case PLUS_EXPR: case MINUS_EXPR:
5738 /* See if we can eliminate the operation on both sides. If we can, we
5739 can return a new PLUS or MINUS. If we can't, the only remaining
5740 cases where we can do anything are if the second operand is a constant. */
5742 sub_strict_overflow_p = false;
5743 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5744 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5745 if (t1 != 0 && t2 != 0
5746 && (code == MULT_EXPR
5747 /* If not multiplication, we can only do this if both operands
5748 are divisible by c. */
5749 || (multiple_of_p (ctype, op0, c)
5750 && multiple_of_p (ctype, op1, c))))
5752 if (sub_strict_overflow_p)
5753 *strict_overflow_p = true;
5754 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5755 fold_convert (ctype, t2));
5758 /* If this was a subtraction, negate OP1 and set it to be an addition.
5759 This simplifies the logic below. */
5760 if (tcode == MINUS_EXPR)
5762 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5763 /* If OP1 was not easily negatable, the constant may be OP0. */
5764 if (TREE_CODE (op0) == INTEGER_CST)
5775 if (TREE_CODE (op1) != INTEGER_CST)
5778 /* If either OP1 or C is negative, this optimization is not safe for
5779 some of the division and remainder types while for others we need
5780 to change the code. */
5781 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5783 if (code == CEIL_DIV_EXPR)
5784 code = FLOOR_DIV_EXPR;
5785 else if (code == FLOOR_DIV_EXPR)
5786 code = CEIL_DIV_EXPR;
5787 else if (code != MULT_EXPR
5788 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5792 /* If it's a multiply or a division/modulus operation of a multiple
5793 of our constant, do the operation and verify it doesn't overflow. */
5794 if (code == MULT_EXPR
5795 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5797 op1 = const_binop (code, fold_convert (ctype, op1),
5798 fold_convert (ctype, c));
5799 /* We allow the constant to overflow with wrapping semantics. */
5801 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5807 /* If we have an unsigned type that is not a sizetype, we cannot widen
5808 the operation since it will change the result if the original
5809 computation overflowed. */
5810 if (TYPE_UNSIGNED (ctype)
5811 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5815 /* If we were able to eliminate our operation from the first side,
5816 apply our operation to the second side and reform the PLUS. */
5817 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5818 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5820 /* The last case is if we are a multiply. In that case, we can
5821 apply the distributive law to commute the multiply and addition
5822 if the multiplication of the constants doesn't overflow. */
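/* For example, (X + 4) * 2 can be rewritten here as X * 2 + 8, provided
   the multiplication of the constants does not overflow.  */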
5823 if (code == MULT_EXPR)
5824 return fold_build2 (tcode, ctype,
5825 fold_build2 (code, ctype,
5826 fold_convert (ctype, op0),
5827 fold_convert (ctype, c)),
5833 /* We have a special case here if we are doing something like
5834 (C * 8) % 4 since we know that's zero. */
5835 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5836 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5837 /* If the multiplication can overflow we cannot optimize this.
5838 ??? Until we can properly mark individual operations as
5839 not overflowing we need to treat sizetype specially here, as
5840 stor-layout relies on this optimization to make
5841 DECL_FIELD_BIT_OFFSET always a constant. */
5842 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5843 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5844 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5845 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5846 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5848 *strict_overflow_p = true;
5849 return omit_one_operand (type, integer_zero_node, op0);
5852 /* ... fall through ... */
5854 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5855 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5856 /* If we can extract our operation from the LHS, do so and return a
5857 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5858 do something only if the second operand is a constant. */
5860 && (t1 = extract_muldiv (op0, c, code, wide_type,
5861 strict_overflow_p)) != 0)
5862 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5863 fold_convert (ctype, op1));
5864 else if (tcode == MULT_EXPR && code == MULT_EXPR
5865 && (t1 = extract_muldiv (op1, c, code, wide_type,
5866 strict_overflow_p)) != 0)
5867 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5868 fold_convert (ctype, t1));
5869 else if (TREE_CODE (op1) != INTEGER_CST)
5872 /* If these are the same operation types, we can associate them
5873 assuming no overflow. */
5875 && 0 != (t1 = int_const_binop (MULT_EXPR,
5876 fold_convert (ctype, op1),
5877 fold_convert (ctype, c), 1))
5878 && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1),
5879 (TYPE_UNSIGNED (ctype)
5880 && tcode != MULT_EXPR) ? -1 : 1,
5881 TREE_OVERFLOW (t1)))
5882 && !TREE_OVERFLOW (t1))
5883 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5885 /* If these operations "cancel" each other, we have the main
5886 optimizations of this pass, which occur when either constant is a
5887 multiple of the other, in which case we replace this with either an
5888 operation of CODE or TCODE.
5890 If we have an unsigned type that is not a sizetype, we cannot do
5891 this since it will change the result if the original computation overflowed. */
5893 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5894 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5895 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5896 || (tcode == MULT_EXPR
5897 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5898 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5899 && code != MULT_EXPR)))
5901 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5903 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5904 *strict_overflow_p = true;
5905 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5906 fold_convert (ctype,
5907 const_binop (TRUNC_DIV_EXPR,
5910 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5912 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5913 *strict_overflow_p = true;
5914 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5915 fold_convert (ctype,
5916 const_binop (TRUNC_DIV_EXPR,
5929 /* Return a node which has the indicated constant VALUE (either 0 or
5930 1), and is of the indicated TYPE. */
5933 constant_boolean_node (int value, tree type)
5935 if (type == integer_type_node)
5936 return value ? integer_one_node : integer_zero_node;
5937 else if (type == boolean_type_node)
5938 return value ? boolean_true_node : boolean_false_node;
5940 return build_int_cst (type, value);
5944 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5945 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5946 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5947 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5948 COND is the first argument to CODE; otherwise (as in the example
5949 given here), it is the second argument. TYPE is the type of the
5950 original expression. Return NULL_TREE if no simplification is possible. */
5954 fold_binary_op_with_conditional_arg (location_t loc,
5955 enum tree_code code,
5956 tree type, tree op0, tree op1,
5957 tree cond, tree arg, int cond_first_p)
5959 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5960 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5961 tree test, true_value, false_value;
5962 tree lhs = NULL_TREE;
5963 tree rhs = NULL_TREE;
5965 if (TREE_CODE (cond) == COND_EXPR)
5967 test = TREE_OPERAND (cond, 0);
5968 true_value = TREE_OPERAND (cond, 1);
5969 false_value = TREE_OPERAND (cond, 2);
5970 /* If this operand throws an expression, then it does not make
5971 sense to try to perform a logical or arithmetic operation involving it. */
5973 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5975 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5980 tree testtype = TREE_TYPE (cond);
5982 true_value = constant_boolean_node (true, testtype);
5983 false_value = constant_boolean_node (false, testtype);
5986 /* This transformation is only worthwhile if we don't have to wrap ARG
5987 in a SAVE_EXPR and the operation can be simplified on at least one
5988 of the branches once it's pushed inside the COND_EXPR. */
5989 if (!TREE_CONSTANT (arg)
5990 && (TREE_SIDE_EFFECTS (arg)
5991 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
5994 arg = fold_convert_loc (loc, arg_type, arg);
5997 true_value = fold_convert_loc (loc, cond_type, true_value);
5999 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6001 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6005 false_value = fold_convert_loc (loc, cond_type, false_value);
6007 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6009 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6012 /* Check that we have simplified at least one of the branches. */
6013 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6016 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6020 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6022 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6023 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6024 ADDEND is the same as X.
6026 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6027 and finite. The problematic cases are when X is zero, and its mode
6028 has signed zeros. In the case of rounding towards -infinity,
6029 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6030 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6033 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6035 if (!real_zerop (addend))
6038 /* Don't allow the fold with -fsignaling-nans. */
6039 if (HONOR_SNANS (TYPE_MODE (type)))
6042 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6043 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6046 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6047 if (TREE_CODE (addend) == REAL_CST
6048 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6051 /* The mode has signed zeros, and we have to honor their sign.
6052 In this situation, there is only one case we can return true for.
6053 X - 0 is the same as X unless rounding towards -infinity is in effect. */
6055 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6058 /* Subroutine of fold() that checks comparisons of built-in math
6059 functions against real constants.
6061 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6062 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6063 is the type of the result and ARG0 and ARG1 are the operands of the
6064 comparison. ARG1 must be a TREE_REAL_CST.
6066 The function returns the constant folded tree if a simplification
6067 can be made, and NULL_TREE otherwise. */
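/* For example, sqrt (x) > 2.0 can be folded to x > 4.0, and sqrt (x) < -1.0
   to constant false; the comparison code and the NaN/infinity rules above
   determine which rewrites are safe.  */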
6070 fold_mathfn_compare (location_t loc,
6071 enum built_in_function fcode, enum tree_code code,
6072 tree type, tree arg0, tree arg1)
6076 if (BUILTIN_SQRT_P (fcode))
6078 tree arg = CALL_EXPR_ARG (arg0, 0);
6079 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6081 c = TREE_REAL_CST (arg1);
6082 if (REAL_VALUE_NEGATIVE (c))
6084 /* sqrt(x) < y is always false, if y is negative. */
6085 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6086 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6088 /* sqrt(x) > y is always true, if y is negative and we
6089 don't care about NaNs, i.e. negative values of x. */
6090 if (code == NE_EXPR || !HONOR_NANS (mode))
6091 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6093 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6094 return fold_build2_loc (loc, GE_EXPR, type, arg,
6095 build_real (TREE_TYPE (arg), dconst0));
6097 else if (code == GT_EXPR || code == GE_EXPR)
6101 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6102 real_convert (&c2, mode, &c2);
6104 if (REAL_VALUE_ISINF (c2))
6106 /* sqrt(x) > y is x == +Inf, when y is very large. */
6107 if (HONOR_INFINITIES (mode))
6108 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6109 build_real (TREE_TYPE (arg), c2));
6111 /* sqrt(x) > y is always false, when y is very large
6112 and we don't care about infinities. */
6113 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6116 /* sqrt(x) > c is the same as x > c*c. */
6117 return fold_build2_loc (loc, code, type, arg,
6118 build_real (TREE_TYPE (arg), c2));
6120 else if (code == LT_EXPR || code == LE_EXPR)
6124 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6125 real_convert (&c2, mode, &c2);
6127 if (REAL_VALUE_ISINF (c2))
6129 /* sqrt(x) < y is always true, when y is a very large
6130 value and we don't care about NaNs or Infinities. */
6131 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6132 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6134 /* sqrt(x) < y is x != +Inf when y is very large and we
6135 don't care about NaNs. */
6136 if (! HONOR_NANS (mode))
6137 return fold_build2_loc (loc, NE_EXPR, type, arg,
6138 build_real (TREE_TYPE (arg), c2));
6140 /* sqrt(x) < y is x >= 0 when y is very large and we
6141 don't care about Infinities. */
6142 if (! HONOR_INFINITIES (mode))
6143 return fold_build2_loc (loc, GE_EXPR, type, arg,
6144 build_real (TREE_TYPE (arg), dconst0));
6146 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6147 if (lang_hooks.decls.global_bindings_p () != 0
6148 || CONTAINS_PLACEHOLDER_P (arg))
6151 arg = save_expr (arg);
6152 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6153 fold_build2_loc (loc, GE_EXPR, type, arg,
6154 build_real (TREE_TYPE (arg),
6156 fold_build2_loc (loc, NE_EXPR, type, arg,
6157 build_real (TREE_TYPE (arg),
6161 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6162 if (! HONOR_NANS (mode))
6163 return fold_build2_loc (loc, code, type, arg,
6164 build_real (TREE_TYPE (arg), c2));
6166 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6167 if (lang_hooks.decls.global_bindings_p () == 0
6168 && ! CONTAINS_PLACEHOLDER_P (arg))
6170 arg = save_expr (arg);
6171 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6172 fold_build2_loc (loc, GE_EXPR, type, arg,
6173 build_real (TREE_TYPE (arg),
6175 fold_build2_loc (loc, code, type, arg,
6176 build_real (TREE_TYPE (arg),
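/* Worked instances of the sqrt folds above (illustrative only; the
   operand x is a hypothetical argument): sqrt(x) > 2.0 becomes
   x > 4.0; sqrt(x) < 2.0 becomes x < 4.0 when NaNs need not be
   honored, and x >= 0.0 && x < 4.0 otherwise; for a negative bound
   such as sqrt(x) < -1.0 the result is known to be false outright.  */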
6185 /* Subroutine of fold() that optimizes comparisons against Infinities,
6186 either +Inf or -Inf.
6188 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6189 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6190 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6192 The function returns the constant folded tree if a simplification
6193 can be made, and NULL_TREE otherwise. */
6196 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6197 tree arg0, tree arg1)
6199 enum machine_mode mode;
6200 REAL_VALUE_TYPE max;
6204 mode = TYPE_MODE (TREE_TYPE (arg0));
6206 /* For negative infinity swap the sense of the comparison. */
6207 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6209 code = swap_tree_comparison (code);
6214 /* x > +Inf is always false, if we ignore sNaNs. */
6215 if (HONOR_SNANS (mode))
6217 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6220 /* x <= +Inf is always true, if we don't care about NaNs. */
6221 if (! HONOR_NANS (mode))
6222 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6224 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6225 if (lang_hooks.decls.global_bindings_p () == 0
6226 && ! CONTAINS_PLACEHOLDER_P (arg0))
6228 arg0 = save_expr (arg0);
6229 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6235 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6236 real_maxval (&max, neg, mode);
6237 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6238 arg0, build_real (TREE_TYPE (arg0), max));
6241 /* x < +Inf is always equal to x <= DBL_MAX. */
6242 real_maxval (&max, neg, mode);
6243 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6244 arg0, build_real (TREE_TYPE (arg0), max));
6247 /* x != +Inf is always equal to !(x > DBL_MAX). */
6248 real_maxval (&max, neg, mode);
6249 if (! HONOR_NANS (mode))
6250 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6251 arg0, build_real (TREE_TYPE (arg0), max));
6253 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6254 arg0, build_real (TREE_TYPE (arg0), max));
6255 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
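/* Worked examples of the folds above (illustrative only): for double,
   x >= +Inf becomes x > DBL_MAX, x < +Inf becomes x <= DBL_MAX, and,
   when NaNs need not be honored, x != +Inf becomes x <= DBL_MAX;
   comparisons against -Inf are handled by first swapping the sense of
   the comparison.  */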
6264 /* Subroutine of fold() that optimizes comparisons of a division by
6265 a nonzero integer constant against an integer constant, i.e. X / C1 op C2.
6268 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6269 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6270 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6272 The function returns the constant folded tree if a simplification
6273 can be made, and NULL_TREE otherwise. */
6276 fold_div_compare (location_t loc,
6277 enum tree_code code, tree type, tree arg0, tree arg1)
6279 tree prod, tmp, hi, lo;
6280 tree arg00 = TREE_OPERAND (arg0, 0);
6281 tree arg01 = TREE_OPERAND (arg0, 1);
6283 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6287 /* We have to do this the hard way to detect unsigned overflow.
6288 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6289 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6290 TREE_INT_CST_HIGH (arg01),
6291 TREE_INT_CST_LOW (arg1),
6292 TREE_INT_CST_HIGH (arg1),
6293 &val.low, &val.high, unsigned_p);
6294 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6295 neg_overflow = false;
6299 tmp = int_const_binop (MINUS_EXPR, arg01,
6300 build_int_cst (TREE_TYPE (arg01), 1), 0);
6303 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6304 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6305 TREE_INT_CST_HIGH (prod),
6306 TREE_INT_CST_LOW (tmp),
6307 TREE_INT_CST_HIGH (tmp),
6308 &val.low, &val.high, unsigned_p);
6309 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6310 -1, overflow | TREE_OVERFLOW (prod));
6312 else if (tree_int_cst_sgn (arg01) >= 0)
6314 tmp = int_const_binop (MINUS_EXPR, arg01,
6315 build_int_cst (TREE_TYPE (arg01), 1), 0);
6316 switch (tree_int_cst_sgn (arg1))
6319 neg_overflow = true;
6320 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6325 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6330 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6340 /* A negative divisor reverses the relational operators. */
6341 code = swap_tree_comparison (code);
6343 tmp = int_const_binop (PLUS_EXPR, arg01,
6344 build_int_cst (TREE_TYPE (arg01), 1), 0);
6345 switch (tree_int_cst_sgn (arg1))
6348 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6353 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6358 neg_overflow = true;
6359 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6371 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6372 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6373 if (TREE_OVERFLOW (hi))
6374 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6375 if (TREE_OVERFLOW (lo))
6376 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6377 return build_range_check (loc, type, arg00, 1, lo, hi);
6380 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6381 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6382 if (TREE_OVERFLOW (hi))
6383 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6384 if (TREE_OVERFLOW (lo))
6385 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6386 return build_range_check (loc, type, arg00, 0, lo, hi);
6389 if (TREE_OVERFLOW (lo))
6391 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6392 return omit_one_operand_loc (loc, type, tmp, arg00);
6394 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6397 if (TREE_OVERFLOW (hi))
6399 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6400 return omit_one_operand_loc (loc, type, tmp, arg00);
6402 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6405 if (TREE_OVERFLOW (hi))
6407 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6408 return omit_one_operand_loc (loc, type, tmp, arg00);
6410 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6413 if (TREE_OVERFLOW (lo))
6415 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6416 return omit_one_operand_loc (loc, type, tmp, arg00);
6418 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
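/* A worked instance of the range check built above (illustrative
   only): with truncating division, X / 4 == 2 holds exactly when
   8 <= X && X <= 11, so the comparison folds to a range check with
   LO = 8 and HI = 11; X / 4 > 2 likewise folds to X > 11.  */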
6428 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6429 equality/inequality test, then return a simplified form of the test
6430 using a sign test. Otherwise return NULL. TYPE is the desired result type. */
6434 fold_single_bit_test_into_sign_test (location_t loc,
6435 enum tree_code code, tree arg0, tree arg1,
6438 /* If this is testing a single bit, we can optimize the test. */
6439 if ((code == NE_EXPR || code == EQ_EXPR)
6440 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6441 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6443 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6444 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6445 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6447 if (arg00 != NULL_TREE
6448 /* This is only a win if casting to a signed type is cheap,
6449 i.e. when arg00's type is not a partial mode. */
6450 && TYPE_PRECISION (TREE_TYPE (arg00))
6451 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6453 tree stype = signed_type_for (TREE_TYPE (arg00));
6454 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6456 fold_convert_loc (loc, stype, arg00),
6457 build_int_cst (stype, 0));
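/* For illustration: on a target with 32-bit int, (x & 0x80000000) != 0
   tests the sign bit of x, so the comparison folds to (int) x < 0, and
   (x & 0x80000000) == 0 folds to (int) x >= 0.  */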
6464 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6465 equality/inequality test, then return a simplified form of
6466 the test using shifts and logical operations. Otherwise return
6467 NULL. TYPE is the desired result type. */
6470 fold_single_bit_test (location_t loc, enum tree_code code,
6471 tree arg0, tree arg1, tree result_type)
6473 /* If this is testing a single bit, we can optimize the test. */
6474 if ((code == NE_EXPR || code == EQ_EXPR)
6475 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6476 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6478 tree inner = TREE_OPERAND (arg0, 0);
6479 tree type = TREE_TYPE (arg0);
6480 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6481 enum machine_mode operand_mode = TYPE_MODE (type);
6483 tree signed_type, unsigned_type, intermediate_type;
6486 /* First, see if we can fold the single bit test into a sign-bit
6488 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6493 /* Otherwise we have (A & C) != 0 where C is a single bit,
6494 convert that into ((A >> C2) & 1), where C2 = log2(C).
6495 Similarly for (A & C) == 0. */
6497 /* If INNER is a right shift of a constant and it plus BITNUM does
6498 not overflow, adjust BITNUM and INNER. */
6499 if (TREE_CODE (inner) == RSHIFT_EXPR
6500 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6501 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6502 && bitnum < TYPE_PRECISION (type)
6503 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6504 bitnum - TYPE_PRECISION (type)))
6506 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6507 inner = TREE_OPERAND (inner, 0);
6510 /* If we are going to be able to omit the AND below, we must do our
6511 operations as unsigned. If we must use the AND, we have a choice.
6512 Normally unsigned is faster, but on some machines signed is. */
6513 #ifdef LOAD_EXTEND_OP
6514 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6515 && !flag_syntax_only) ? 0 : 1;
6520 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6521 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6522 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6523 inner = fold_convert_loc (loc, intermediate_type, inner);
6526 inner = build2 (RSHIFT_EXPR, intermediate_type,
6527 inner, size_int (bitnum));
6529 one = build_int_cst (intermediate_type, 1);
6531 if (code == EQ_EXPR)
6532 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6534 /* Put the AND last so it can combine with more things. */
6535 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6537 /* Make sure to return the proper type. */
6538 inner = fold_convert_loc (loc, result_type, inner);
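/* For illustration: (x & 8) != 0 is rewritten here as ((x >> 3) & 1),
   performed in a signed or unsigned intermediate type of the same mode
   depending on how the target extends loads, and (x & 8) == 0
   additionally XORs the shifted bit with 1 before the final AND.  */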
6545 /* Check whether we are allowed to reorder operands arg0 and arg1,
6546 such that the evaluation of arg1 occurs before arg0. */
6549 reorder_operands_p (const_tree arg0, const_tree arg1)
6551 if (! flag_evaluation_order)
6553 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6555 return ! TREE_SIDE_EFFECTS (arg0)
6556 && ! TREE_SIDE_EFFECTS (arg1);
6559 /* Test whether it is preferable to swap two operands, ARG0 and
6560 ARG1, for example because ARG0 is an integer constant and ARG1
6561 isn't. If REORDER is true, only recommend swapping if we can
6562 evaluate the operands in reverse order. */
6565 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6567 STRIP_SIGN_NOPS (arg0);
6568 STRIP_SIGN_NOPS (arg1);
6570 if (TREE_CODE (arg1) == INTEGER_CST)
6572 if (TREE_CODE (arg0) == INTEGER_CST)
6575 if (TREE_CODE (arg1) == REAL_CST)
6577 if (TREE_CODE (arg0) == REAL_CST)
6580 if (TREE_CODE (arg1) == FIXED_CST)
6582 if (TREE_CODE (arg0) == FIXED_CST)
6585 if (TREE_CODE (arg1) == COMPLEX_CST)
6587 if (TREE_CODE (arg0) == COMPLEX_CST)
6590 if (TREE_CONSTANT (arg1))
6592 if (TREE_CONSTANT (arg0))
6595 if (optimize_function_for_size_p (cfun))
6598 if (reorder && flag_evaluation_order
6599 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6602 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6603 for commutative and comparison operators. Ensuring a canonical
6604 form allows the optimizers to find additional redundancies without
6605 having to explicitly check for both orderings. */
6606 if (TREE_CODE (arg0) == SSA_NAME
6607 && TREE_CODE (arg1) == SSA_NAME
6608 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6611 /* Put SSA_NAMEs last. */
6612 if (TREE_CODE (arg1) == SSA_NAME)
6614 if (TREE_CODE (arg0) == SSA_NAME)
6617 /* Put variables last. */
6626 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6627 ARG0 is extended to a wider type. */
6630 fold_widened_comparison (location_t loc, enum tree_code code,
6631 tree type, tree arg0, tree arg1)
6633 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6635 tree shorter_type, outer_type;
6639 if (arg0_unw == arg0)
6641 shorter_type = TREE_TYPE (arg0_unw);
6643 #ifdef HAVE_canonicalize_funcptr_for_compare
6644 /* Disable this optimization if we're casting a function pointer
6645 type on targets that require function pointer canonicalization. */
6646 if (HAVE_canonicalize_funcptr_for_compare
6647 && TREE_CODE (shorter_type) == POINTER_TYPE
6648 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6652 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6655 arg1_unw = get_unwidened (arg1, NULL_TREE);
6657 /* If possible, express the comparison in the shorter mode. */
6658 if ((code == EQ_EXPR || code == NE_EXPR
6659 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6660 && (TREE_TYPE (arg1_unw) == shorter_type
6661 || ((TYPE_PRECISION (shorter_type)
6662 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6663 && (TYPE_UNSIGNED (shorter_type)
6664 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6665 || (TREE_CODE (arg1_unw) == INTEGER_CST
6666 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6667 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6668 && int_fits_type_p (arg1_unw, shorter_type))))
6669 return fold_build2_loc (loc, code, type, arg0_unw,
6670 fold_convert_loc (loc, shorter_type, arg1_unw));
6672 if (TREE_CODE (arg1_unw) != INTEGER_CST
6673 || TREE_CODE (shorter_type) != INTEGER_TYPE
6674 || !int_fits_type_p (arg1_unw, shorter_type))
6677 /* If we are comparing with an integer that does not fit into the range
6678 of the shorter type, the result is known. */
6679 outer_type = TREE_TYPE (arg1_unw);
6680 min = lower_bound_in_type (outer_type, shorter_type);
6681 max = upper_bound_in_type (outer_type, shorter_type);
6683 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6685 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6692 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6697 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6703 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6705 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6710 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6712 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
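/* A worked example (illustrative only; C is a hypothetical operand):
   for unsigned char C widened to int, (int) C == 300 compares against
   a value outside the range [0, 255] of the narrower type, so the
   result is known to be false, and (int) C < 300 is likewise known to
   be true.  */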
6721 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6722 ARG0 just the signedness is changed. */
6725 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6726 tree arg0, tree arg1)
6729 tree inner_type, outer_type;
6731 if (!CONVERT_EXPR_P (arg0))
6734 outer_type = TREE_TYPE (arg0);
6735 arg0_inner = TREE_OPERAND (arg0, 0);
6736 inner_type = TREE_TYPE (arg0_inner);
6738 #ifdef HAVE_canonicalize_funcptr_for_compare
6739 /* Disable this optimization if we're casting a function pointer
6740 type on targets that require function pointer canonicalization. */
6741 if (HAVE_canonicalize_funcptr_for_compare
6742 && TREE_CODE (inner_type) == POINTER_TYPE
6743 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6747 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6750 if (TREE_CODE (arg1) != INTEGER_CST
6751 && !(CONVERT_EXPR_P (arg1)
6752 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6755 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6756 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6761 if (TREE_CODE (arg1) == INTEGER_CST)
6762 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6763 0, TREE_OVERFLOW (arg1));
6765 arg1 = fold_convert_loc (loc, inner_type, arg1);
6767 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
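/* For illustration (X is a hypothetical operand of type int):
   (unsigned int) X == 5U is an equality in a type of the same
   precision that differs only in signedness, so it folds back to
   X == 5 on the inner operand.  */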
6770 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6771 the step of the array. Reconstructs s and delta in the case of s *
6772 delta being an integer constant (and thus already folded). ADDR is
6773 the address. OP1 is the multiplicative expression. If the
6774 function succeeds, the new address expression is returned.
6775 Otherwise NULL_TREE is returned. LOC is the location of the
6776 resulting expression. */
6779 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6781 tree s, delta, step;
6782 tree ref = TREE_OPERAND (addr, 0), pref;
6787 /* Strip the nops that might be added when converting op1 to sizetype. */
6790 /* Canonicalize op1 into a possibly non-constant delta
6791 and an INTEGER_CST s. */
6792 if (TREE_CODE (op1) == MULT_EXPR)
6794 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6799 if (TREE_CODE (arg0) == INTEGER_CST)
6804 else if (TREE_CODE (arg1) == INTEGER_CST)
6812 else if (TREE_CODE (op1) == INTEGER_CST)
6819 /* Treat op1 as delta * 1. */
6821 s = integer_one_node;
6824 for (;; ref = TREE_OPERAND (ref, 0))
6826 if (TREE_CODE (ref) == ARRAY_REF)
6830 /* Remember if this was a multi-dimensional array. */
6831 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6834 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6837 itype = TREE_TYPE (domain);
6839 step = array_ref_element_size (ref);
6840 if (TREE_CODE (step) != INTEGER_CST)
6845 if (! tree_int_cst_equal (step, s))
6850 /* Check whether delta is a multiple of step. */
6851 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6857 /* Only fold here if we can verify we do not overflow one
6858 dimension of a multi-dimensional array. */
6863 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6864 || !TYPE_MAX_VALUE (domain)
6865 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6868 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6869 fold_convert_loc (loc, itype,
6870 TREE_OPERAND (ref, 1)),
6871 fold_convert_loc (loc, itype, delta));
6873 || TREE_CODE (tmp) != INTEGER_CST
6874 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6883 if (!handled_component_p (ref))
6887 /* We found a suitable array reference. So copy everything up to it,
6888 and replace the index. */
6890 pref = TREE_OPERAND (addr, 0);
6891 ret = copy_node (pref);
6892 SET_EXPR_LOCATION (ret, loc);
6897 pref = TREE_OPERAND (pref, 0);
6898 TREE_OPERAND (pos, 0) = copy_node (pref);
6899 pos = TREE_OPERAND (pos, 0);
6902 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6903 fold_convert_loc (loc, itype,
6904 TREE_OPERAND (pos, 1)),
6905 fold_convert_loc (loc, itype, delta));
6907 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
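/* A worked example (illustrative only; a, i and d are hypothetical):
   for int a[10] with 4-byte elements, the address computation
   &a[i] p+ d * 4 has a multiplier matching the element size, so it can
   be rewritten as &a[i + d], provided the new index can be shown not
   to overflow the array dimension.  */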
6911 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6912 means A >= Y && A != MAX, but in this case we know that
6913 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6916 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6918 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6920 if (TREE_CODE (bound) == LT_EXPR)
6921 a = TREE_OPERAND (bound, 0);
6922 else if (TREE_CODE (bound) == GT_EXPR)
6923 a = TREE_OPERAND (bound, 1);
6927 typea = TREE_TYPE (a);
6928 if (!INTEGRAL_TYPE_P (typea)
6929 && !POINTER_TYPE_P (typea))
6932 if (TREE_CODE (ineq) == LT_EXPR)
6934 a1 = TREE_OPERAND (ineq, 1);
6935 y = TREE_OPERAND (ineq, 0);
6937 else if (TREE_CODE (ineq) == GT_EXPR)
6939 a1 = TREE_OPERAND (ineq, 0);
6940 y = TREE_OPERAND (ineq, 1);
6945 if (TREE_TYPE (a1) != typea)
6948 if (POINTER_TYPE_P (typea))
6950 /* Convert the pointer types into integer before taking the difference. */
6951 tree ta = fold_convert_loc (loc, ssizetype, a);
6952 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6953 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6956 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6958 if (!diff || !integer_onep (diff))
6961 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6964 /* Fold a sum or difference of at least one multiplication.
6965 Returns the folded tree or NULL if no simplification could be made. */
6968 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6969 tree arg0, tree arg1)
6971 tree arg00, arg01, arg10, arg11;
6972 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6974 /* (A * C) +- (B * C) -> (A+-B) * C.
6975 (A * C) +- A -> A * (C+-1).
6976 We are most concerned about the case where C is a constant,
6977 but other combinations show up during loop reduction. Since
6978 it is not difficult, try all four possibilities. */
6980 if (TREE_CODE (arg0) == MULT_EXPR)
6982 arg00 = TREE_OPERAND (arg0, 0);
6983 arg01 = TREE_OPERAND (arg0, 1);
6985 else if (TREE_CODE (arg0) == INTEGER_CST)
6987 arg00 = build_one_cst (type);
6992 /* We cannot generate constant 1 for fract. */
6993 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6996 arg01 = build_one_cst (type);
6998 if (TREE_CODE (arg1) == MULT_EXPR)
7000 arg10 = TREE_OPERAND (arg1, 0);
7001 arg11 = TREE_OPERAND (arg1, 1);
7003 else if (TREE_CODE (arg1) == INTEGER_CST)
7005 arg10 = build_one_cst (type);
7006 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7007 the purpose of this canonicalization. */
7008 if (TREE_INT_CST_HIGH (arg1) == -1
7009 && negate_expr_p (arg1)
7010 && code == PLUS_EXPR)
7012 arg11 = negate_expr (arg1);
7020 /* We cannot generate constant 1 for fract. */
7021 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7024 arg11 = build_one_cst (type);
7028 if (operand_equal_p (arg01, arg11, 0))
7029 same = arg01, alt0 = arg00, alt1 = arg10;
7030 else if (operand_equal_p (arg00, arg10, 0))
7031 same = arg00, alt0 = arg01, alt1 = arg11;
7032 else if (operand_equal_p (arg00, arg11, 0))
7033 same = arg00, alt0 = arg01, alt1 = arg10;
7034 else if (operand_equal_p (arg01, arg10, 0))
7035 same = arg01, alt0 = arg00, alt1 = arg11;
7037 /* No identical multiplicands; see if we can find a common
7038 power-of-two factor in non-power-of-two multiplies. This
7039 can help in multi-dimensional array access. */
7040 else if (host_integerp (arg01, 0)
7041 && host_integerp (arg11, 0))
7043 HOST_WIDE_INT int01, int11, tmp;
7046 int01 = TREE_INT_CST_LOW (arg01);
7047 int11 = TREE_INT_CST_LOW (arg11);
7049 /* Move min of absolute values to int11. */
7050 if ((int01 >= 0 ? int01 : -int01)
7051 < (int11 >= 0 ? int11 : -int11))
7053 tmp = int01, int01 = int11, int11 = tmp;
7054 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7061 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7062 /* The remainder should not be a constant, otherwise we
7063 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7064 increase the number of multiplications needed. */
7065 && TREE_CODE (arg10) != INTEGER_CST)
7067 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7068 build_int_cst (TREE_TYPE (arg00),
7073 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7078 return fold_build2_loc (loc, MULT_EXPR, type,
7079 fold_build2_loc (loc, code, type,
7080 fold_convert_loc (loc, type, alt0),
7081 fold_convert_loc (loc, type, alt1)),
7082 fold_convert_loc (loc, type, same));
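/* Worked instances of the folds above (illustrative only; a, b, i and
   j are hypothetical operands): a * 4 + b * 4 becomes (a + b) * 4 via
   an identical multiplicand, while i * 12 + j * 4, which shares only
   the power-of-two factor 4, becomes (i * 3 + j) * 4.  */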
7087 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7088 specified by EXPR into the buffer PTR of length LEN bytes.
7089 Return the number of bytes placed in the buffer, or zero upon failure. */
7093 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7095 tree type = TREE_TYPE (expr);
7096 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7097 int byte, offset, word, words;
7098 unsigned char value;
7100 if (total_bytes > len)
7102 words = total_bytes / UNITS_PER_WORD;
7104 for (byte = 0; byte < total_bytes; byte++)
7106 int bitpos = byte * BITS_PER_UNIT;
7107 if (bitpos < HOST_BITS_PER_WIDE_INT)
7108 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7110 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7111 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7113 if (total_bytes > UNITS_PER_WORD)
7115 word = byte / UNITS_PER_WORD;
7116 if (WORDS_BIG_ENDIAN)
7117 word = (words - 1) - word;
7118 offset = word * UNITS_PER_WORD;
7119 if (BYTES_BIG_ENDIAN)
7120 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7122 offset += byte % UNITS_PER_WORD;
7125 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7126 ptr[offset] = value;
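/* For illustration: encoding the 32-bit INTEGER_CST 0x01020304 places
   the bytes 04 03 02 01 in PTR on a little-endian target and
   01 02 03 04 on a big-endian one; the word shuffling above only comes
   into play when the constant spans more than one target word.  */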
7132 /* Subroutine of native_encode_expr. Encode the REAL_CST
7133 specified by EXPR into the buffer PTR of length LEN bytes.
7134 Return the number of bytes placed in the buffer, or zero upon failure. */
7138 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7140 tree type = TREE_TYPE (expr);
7141 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7142 int byte, offset, word, words, bitpos;
7143 unsigned char value;
7145 /* There are always 32 bits in each long, no matter the size of
7146 the host's long. We handle floating point representations with up to 192 bits. */
7150 if (total_bytes > len)
7152 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7154 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7156 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7157 bitpos += BITS_PER_UNIT)
7159 byte = (bitpos / BITS_PER_UNIT) & 3;
7160 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7162 if (UNITS_PER_WORD < 4)
7164 word = byte / UNITS_PER_WORD;
7165 if (WORDS_BIG_ENDIAN)
7166 word = (words - 1) - word;
7167 offset = word * UNITS_PER_WORD;
7168 if (BYTES_BIG_ENDIAN)
7169 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7171 offset += byte % UNITS_PER_WORD;
7174 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7175 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7180 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7181 specified by EXPR into the buffer PTR of length LEN bytes.
7182 Return the number of bytes placed in the buffer, or zero upon failure. */
7186 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7191 part = TREE_REALPART (expr);
7192 rsize = native_encode_expr (part, ptr, len);
7195 part = TREE_IMAGPART (expr);
7196 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7199 return rsize + isize;
7203 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7204 specified by EXPR into the buffer PTR of length LEN bytes.
7205 Return the number of bytes placed in the buffer, or zero upon failure. */
7209 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7211 int i, size, offset, count;
7212 tree itype, elem, elements;
7215 elements = TREE_VECTOR_CST_ELTS (expr);
7216 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7217 itype = TREE_TYPE (TREE_TYPE (expr));
7218 size = GET_MODE_SIZE (TYPE_MODE (itype));
7219 for (i = 0; i < count; i++)
7223 elem = TREE_VALUE (elements);
7224 elements = TREE_CHAIN (elements);
7231 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7236 if (offset + size > len)
7238 memset (ptr+offset, 0, size);
7246 /* Subroutine of native_encode_expr. Encode the STRING_CST
7247 specified by EXPR into the buffer PTR of length LEN bytes.
7248 Return the number of bytes placed in the buffer, or zero upon failure. */
7252 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7254 tree type = TREE_TYPE (expr);
7255 HOST_WIDE_INT total_bytes;
7257 if (TREE_CODE (type) != ARRAY_TYPE
7258 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7259 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7260 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7262 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7263 if (total_bytes > len)
7265 if (TREE_STRING_LENGTH (expr) < total_bytes)
7267 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7268 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7269 total_bytes - TREE_STRING_LENGTH (expr));
7272 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7277 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7278 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7279 buffer PTR of length LEN bytes. Return the number of bytes
7280 placed in the buffer, or zero upon failure. */
7283 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7285 switch (TREE_CODE (expr))
7288 return native_encode_int (expr, ptr, len);
7291 return native_encode_real (expr, ptr, len);
7294 return native_encode_complex (expr, ptr, len);
7297 return native_encode_vector (expr, ptr, len);
7300 return native_encode_string (expr, ptr, len);
7308 /* Subroutine of native_interpret_expr. Interpret the contents of
7309 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7310 If the buffer cannot be interpreted, return NULL_TREE. */
7313 native_interpret_int (tree type, const unsigned char *ptr, int len)
7315 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7316 int byte, offset, word, words;
7317 unsigned char value;
7320 if (total_bytes > len)
7322 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7325 result = double_int_zero;
7326 words = total_bytes / UNITS_PER_WORD;
7328 for (byte = 0; byte < total_bytes; byte++)
7330 int bitpos = byte * BITS_PER_UNIT;
7331 if (total_bytes > UNITS_PER_WORD)
7333 word = byte / UNITS_PER_WORD;
7334 if (WORDS_BIG_ENDIAN)
7335 word = (words - 1) - word;
7336 offset = word * UNITS_PER_WORD;
7337 if (BYTES_BIG_ENDIAN)
7338 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7340 offset += byte % UNITS_PER_WORD;
7343 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7344 value = ptr[offset];
7346 if (bitpos < HOST_BITS_PER_WIDE_INT)
7347 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7349 result.high |= (unsigned HOST_WIDE_INT) value
7350 << (bitpos - HOST_BITS_PER_WIDE_INT);
7353 return double_int_to_tree (type, result);
7357 /* Subroutine of native_interpret_expr. Interpret the contents of
7358 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7359 If the buffer cannot be interpreted, return NULL_TREE. */
7362 native_interpret_real (tree type, const unsigned char *ptr, int len)
7364 enum machine_mode mode = TYPE_MODE (type);
7365 int total_bytes = GET_MODE_SIZE (mode);
7366 int byte, offset, word, words, bitpos;
7367 unsigned char value;
7368 /* There are always 32 bits in each long, no matter the size of
7369 the host's long. We handle floating point representations with up to 192 bits. */
7374 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7375 if (total_bytes > len || total_bytes > 24)
7377 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7379 memset (tmp, 0, sizeof (tmp));
7380 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7381 bitpos += BITS_PER_UNIT)
7383 byte = (bitpos / BITS_PER_UNIT) & 3;
7384 if (UNITS_PER_WORD < 4)
7386 word = byte / UNITS_PER_WORD;
7387 if (WORDS_BIG_ENDIAN)
7388 word = (words - 1) - word;
7389 offset = word * UNITS_PER_WORD;
7390 if (BYTES_BIG_ENDIAN)
7391 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7393 offset += byte % UNITS_PER_WORD;
7396 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7397 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7399 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7402 real_from_target (&r, tmp, mode);
7403 return build_real (type, r);
7407 /* Subroutine of native_interpret_expr. Interpret the contents of
7408 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7409 If the buffer cannot be interpreted, return NULL_TREE. */
7412 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7414 tree etype, rpart, ipart;
7417 etype = TREE_TYPE (type);
7418 size = GET_MODE_SIZE (TYPE_MODE (etype));
7421 rpart = native_interpret_expr (etype, ptr, size);
7424 ipart = native_interpret_expr (etype, ptr+size, size);
7427 return build_complex (type, rpart, ipart);
7431 /* Subroutine of native_interpret_expr. Interpret the contents of
7432 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7433 If the buffer cannot be interpreted, return NULL_TREE. */
7436 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7438 tree etype, elem, elements;
7441 etype = TREE_TYPE (type);
7442 size = GET_MODE_SIZE (TYPE_MODE (etype));
7443 count = TYPE_VECTOR_SUBPARTS (type);
7444 if (size * count > len)
7447 elements = NULL_TREE;
7448 for (i = count - 1; i >= 0; i--)
7450 elem = native_interpret_expr (etype, ptr+(i*size), size);
7453 elements = tree_cons (NULL_TREE, elem, elements);
7455 return build_vector (type, elements);
7459 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7460 the buffer PTR of length LEN as a constant of type TYPE. For
7461 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7462 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7463 return NULL_TREE. */
7466 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7468 switch (TREE_CODE (type))
7473 return native_interpret_int (type, ptr, len);
7476 return native_interpret_real (type, ptr, len);
7479 return native_interpret_complex (type, ptr, len);
7482 return native_interpret_vector (type, ptr, len);
7490 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7491 TYPE at compile-time. If we're unable to perform the conversion
7492 return NULL_TREE. */
7495 fold_view_convert_expr (tree type, tree expr)
7497 /* We support up to 512-bit values (for V8DFmode). */
7498 unsigned char buffer[64];
7501 /* Check that the host and target are sane. */
7502 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7505 len = native_encode_expr (expr, buffer, sizeof (buffer));
7509 return native_interpret_expr (type, buffer, len);
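/* For illustration: on a target with IEEE single precision,
   VIEW_CONVERT_EXPR<float>(0x3f800000) folds to 1.0f by encoding the
   integer constant into the byte buffer and re-interpreting those
   bytes as a REAL_CST of the requested type.  */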
7512 /* Build an expression for the address of T. Folds away INDIRECT_REF
7513 to avoid confusing the gimplify process. */
7516 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7518 /* The size of the object is not relevant when talking about its address. */
7519 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7520 t = TREE_OPERAND (t, 0);
7522 if (TREE_CODE (t) == INDIRECT_REF)
7524 t = TREE_OPERAND (t, 0);
7526 if (TREE_TYPE (t) != ptrtype)
7527 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7529 else if (TREE_CODE (t) == MEM_REF
7530 && integer_zerop (TREE_OPERAND (t, 1)))
7531 return TREE_OPERAND (t, 0);
7532 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7534 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7536 if (TREE_TYPE (t) != ptrtype)
7537 t = fold_convert_loc (loc, ptrtype, t);
7540 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7545 /* Build an expression for the address of T. */
7548 build_fold_addr_expr_loc (location_t loc, tree t)
7550 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7552 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7555 /* Fold a unary expression of code CODE and type TYPE with operand
7556 OP0. Return the folded expression if folding is successful.
7557 Otherwise, return NULL_TREE. */
7560 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7564 enum tree_code_class kind = TREE_CODE_CLASS (code);
7566 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7567 && TREE_CODE_LENGTH (code) == 1);
7572 if (CONVERT_EXPR_CODE_P (code)
7573 || code == FLOAT_EXPR || code == ABS_EXPR)
7575 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
7577 STRIP_SIGN_NOPS (arg0);
7581 /* Strip any conversions that don't change the mode. This
7582 is safe for every expression, except for a comparison
7583 expression because its signedness is derived from its
7586 Note that this is done as an internal manipulation within
7587 the constant folder, in order to find the simplest
7588 representation of the arguments so that their form can be
7589 studied. In any case, the appropriate type conversions
7590 should be put back in the tree that will get out of the
7596 if (TREE_CODE_CLASS (code) == tcc_unary)
7598 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7599 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7600 fold_build1_loc (loc, code, type,
7601 fold_convert_loc (loc, TREE_TYPE (op0),
7602 TREE_OPERAND (arg0, 1))));
7603 else if (TREE_CODE (arg0) == COND_EXPR)
7605 tree arg01 = TREE_OPERAND (arg0, 1);
7606 tree arg02 = TREE_OPERAND (arg0, 2);
7607 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7608 arg01 = fold_build1_loc (loc, code, type,
7609 fold_convert_loc (loc,
7610 TREE_TYPE (op0), arg01));
7611 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7612 arg02 = fold_build1_loc (loc, code, type,
7613 fold_convert_loc (loc,
7614 TREE_TYPE (op0), arg02));
7615 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7618 /* If this was a conversion, and all we did was to move it
7619 inside the COND_EXPR, bring it back out. But leave it if
7620 it is a conversion from integer to integer and the
7621 result precision is no wider than a word since such a
7622 conversion is cheap and may be optimized away by combine,
7623 while it couldn't if it were outside the COND_EXPR. Then return
7624 so we don't get into an infinite recursion loop taking the
7625 conversion out and then back in. */
7627 if ((CONVERT_EXPR_CODE_P (code)
7628 || code == NON_LVALUE_EXPR)
7629 && TREE_CODE (tem) == COND_EXPR
7630 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7631 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7632 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7633 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7634 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7635 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7636 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7638 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7639 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7640 || flag_syntax_only))
7641 tem = build1_loc (loc, code, type,
7643 TREE_TYPE (TREE_OPERAND
7644 (TREE_OPERAND (tem, 1), 0)),
7645 TREE_OPERAND (tem, 0),
7646 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7647 TREE_OPERAND (TREE_OPERAND (tem, 2),
7651 else if (COMPARISON_CLASS_P (arg0))
7653 if (TREE_CODE (type) == BOOLEAN_TYPE)
7655 arg0 = copy_node (arg0);
7656 TREE_TYPE (arg0) = type;
7659 else if (TREE_CODE (type) != INTEGER_TYPE)
7660 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7661 fold_build1_loc (loc, code, type,
7663 fold_build1_loc (loc, code, type,
7664 integer_zero_node));
7671 /* Re-association barriers around constants and other re-association
7672 barriers can be removed. */
7673 if (CONSTANT_CLASS_P (op0)
7674 || TREE_CODE (op0) == PAREN_EXPR)
7675 return fold_convert_loc (loc, type, op0);
7680 case FIX_TRUNC_EXPR:
7681 if (TREE_TYPE (op0) == type)
7684 /* If we have (type) (a CMP b) and type is an integral type, return
7685 a new expression involving the new type. */
7686 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7687 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7688 TREE_OPERAND (op0, 1));
7690 /* Handle cases of two conversions in a row. */
7691 if (CONVERT_EXPR_P (op0))
7693 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7694 tree inter_type = TREE_TYPE (op0);
7695 int inside_int = INTEGRAL_TYPE_P (inside_type);
7696 int inside_ptr = POINTER_TYPE_P (inside_type);
7697 int inside_float = FLOAT_TYPE_P (inside_type);
7698 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7699 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7700 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7701 int inter_int = INTEGRAL_TYPE_P (inter_type);
7702 int inter_ptr = POINTER_TYPE_P (inter_type);
7703 int inter_float = FLOAT_TYPE_P (inter_type);
7704 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7705 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7706 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7707 int final_int = INTEGRAL_TYPE_P (type);
7708 int final_ptr = POINTER_TYPE_P (type);
7709 int final_float = FLOAT_TYPE_P (type);
7710 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7711 unsigned int final_prec = TYPE_PRECISION (type);
7712 int final_unsignedp = TYPE_UNSIGNED (type);
7714 /* In addition to the cases of two conversions in a row
7715 handled below, if we are converting something to its own
7716 type via an object of identical or wider precision, neither
7717 conversion is needed. */
7718 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7719 && (((inter_int || inter_ptr) && final_int)
7720 || (inter_float && final_float))
7721 && inter_prec >= final_prec)
7722 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7724 /* Likewise, if the intermediate and initial types are either both
7725 float or both integer, we don't need the middle conversion if the
7726 former is wider than the latter and doesn't change the signedness
7727 (for integers). Avoid this if the final type is a pointer since
7728 then we sometimes need the middle conversion. Likewise if the
7729 final type has a precision not equal to the size of its mode. */
7730 if (((inter_int && inside_int)
7731 || (inter_float && inside_float)
7732 || (inter_vec && inside_vec))
7733 && inter_prec >= inside_prec
7734 && (inter_float || inter_vec
7735 || inter_unsignedp == inside_unsignedp)
7736 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7737 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7739 && (! final_vec || inter_prec == inside_prec))
7740 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7742 /* If we have a sign-extension of a zero-extended value, we can
7743 replace that by a single zero-extension. */
7744 if (inside_int && inter_int && final_int
7745 && inside_prec < inter_prec && inter_prec < final_prec
7746 && inside_unsignedp && !inter_unsignedp)
7747 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7749 /* Two conversions in a row are not needed unless:
7750 - some conversion is floating-point (overstrict for now), or
7751 - some conversion is a vector (overstrict for now), or
7752 - the intermediate type is narrower than both initial and final types, or
7754 - the intermediate type and innermost type differ in signedness,
7755 and the outermost type is wider than the intermediate, or
7756 - the initial type is a pointer type and the precisions of the
7757 intermediate and final types differ, or
7758 - the final type is a pointer type and the precisions of the
7759 initial and intermediate types differ. */
7760 if (! inside_float && ! inter_float && ! final_float
7761 && ! inside_vec && ! inter_vec && ! final_vec
7762 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7763 && ! (inside_int && inter_int
7764 && inter_unsignedp != inside_unsignedp
7765 && inter_prec < final_prec)
7766 && ((inter_unsignedp && inter_prec > inside_prec)
7767 == (final_unsignedp && final_prec > inter_prec))
7768 && ! (inside_ptr && inter_prec != final_prec)
7769 && ! (final_ptr && inside_prec != inter_prec)
7770 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7771 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7772 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7775 /* Handle (T *)&A.B.C for A being of type T and B and C
7776 living at offset zero. This occurs frequently in
7777 C++ upcasting and then accessing the base. */
7778 if (TREE_CODE (op0) == ADDR_EXPR
7779 && POINTER_TYPE_P (type)
7780 && handled_component_p (TREE_OPERAND (op0, 0)))
7782 HOST_WIDE_INT bitsize, bitpos;
7784 enum machine_mode mode;
7785 int unsignedp, volatilep;
7786 tree base = TREE_OPERAND (op0, 0);
7787 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7788 &mode, &unsignedp, &volatilep, false);
7789 /* If the reference was to a (constant) zero offset, we can use
7790 the address of the base if it has the same base type
7791 as the result type and the pointer type is unqualified. */
7792 if (! offset && bitpos == 0
7793 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7794 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7795 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7796 return fold_convert_loc (loc, type,
7797 build_fold_addr_expr_loc (loc, base));
7800 if (TREE_CODE (op0) == MODIFY_EXPR
7801 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7802 /* Detect assigning a bitfield. */
7803 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7805 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7807 /* Don't leave an assignment inside a conversion
7808 unless assigning a bitfield. */
7809 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7810 /* First do the assignment, then return converted constant. */
7811 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7812 TREE_NO_WARNING (tem) = 1;
7813 TREE_USED (tem) = 1;
7817 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7818 constant (if x has signed type, the sign bit cannot be set
7819 in c). This folds extension into the BIT_AND_EXPR.
7820 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7821 very likely don't have maximal range for their precision and this
7822 transformation effectively doesn't preserve non-maximal ranges. */
7823 if (TREE_CODE (type) == INTEGER_TYPE
7824 && TREE_CODE (op0) == BIT_AND_EXPR
7825 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7827 tree and_expr = op0;
7828 tree and0 = TREE_OPERAND (and_expr, 0);
7829 tree and1 = TREE_OPERAND (and_expr, 1);
7832 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7833 || (TYPE_PRECISION (type)
7834 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7836 else if (TYPE_PRECISION (TREE_TYPE (and1))
7837 <= HOST_BITS_PER_WIDE_INT
7838 && host_integerp (and1, 1))
7840 unsigned HOST_WIDE_INT cst;
7842 cst = tree_low_cst (and1, 1);
7843 cst &= (HOST_WIDE_INT) -1
7844 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7845 change = (cst == 0);
7846 #ifdef LOAD_EXTEND_OP
7848 && !flag_syntax_only
7849 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7852 tree uns = unsigned_type_for (TREE_TYPE (and0));
7853 and0 = fold_convert_loc (loc, uns, and0);
7854 and1 = fold_convert_loc (loc, uns, and1);
7860 tem = force_fit_type_double (type, tree_to_double_int (and1),
7861 0, TREE_OVERFLOW (and1));
7862 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7863 fold_convert_loc (loc, type, and0), tem);
7867 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7868 when one of the new casts will fold away. Conservatively we assume
7869 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7870 if (POINTER_TYPE_P (type)
7871 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7872 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7873 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7874 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7876 tree arg00 = TREE_OPERAND (arg0, 0);
7877 tree arg01 = TREE_OPERAND (arg0, 1);
7879 return fold_build2_loc (loc,
7880 TREE_CODE (arg0), type,
7881 fold_convert_loc (loc, type, arg00),
7882 fold_convert_loc (loc, sizetype, arg01));
7885 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7886 of the same precision, and X is an integer type not narrower than
7887 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7888 if (INTEGRAL_TYPE_P (type)
7889 && TREE_CODE (op0) == BIT_NOT_EXPR
7890 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7891 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7892 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7894 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7895 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7896 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7897 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7898 fold_convert_loc (loc, type, tem));
7901 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7902 type of X and Y (integer types only). */
7903 if (INTEGRAL_TYPE_P (type)
7904 && TREE_CODE (op0) == MULT_EXPR
7905 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7906 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7908 /* Be careful not to introduce new overflows. */
7910 if (TYPE_OVERFLOW_WRAPS (type))
7913 mult_type = unsigned_type_for (type);
7915 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7917 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7918 fold_convert_loc (loc, mult_type,
7919 TREE_OPERAND (op0, 0)),
7920 fold_convert_loc (loc, mult_type,
7921 TREE_OPERAND (op0, 1)));
7922 return fold_convert_loc (loc, type, tem);
7926 tem = fold_convert_const (code, type, op0);
7927 return tem ? tem : NULL_TREE;
7929 case ADDR_SPACE_CONVERT_EXPR:
7930 if (integer_zerop (arg0))
7931 return fold_convert_const (code, type, arg0);
7934 case FIXED_CONVERT_EXPR:
7935 tem = fold_convert_const (code, type, arg0);
7936 return tem ? tem : NULL_TREE;
7938 case VIEW_CONVERT_EXPR:
7939 if (TREE_TYPE (op0) == type)
7941 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7942 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7943 type, TREE_OPERAND (op0, 0));
7944 if (TREE_CODE (op0) == MEM_REF)
7945 return fold_build2_loc (loc, MEM_REF, type,
7946 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7948 /* For integral conversions with the same precision or pointer
7949 conversions use a NOP_EXPR instead. */
7950 if ((INTEGRAL_TYPE_P (type)
7951 || POINTER_TYPE_P (type))
7952 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7953 || POINTER_TYPE_P (TREE_TYPE (op0)))
7954 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7955 return fold_convert_loc (loc, type, op0);
7957 /* Strip inner integral conversions that do not change the precision. */
7958 if (CONVERT_EXPR_P (op0)
7959 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7960 || POINTER_TYPE_P (TREE_TYPE (op0)))
7961 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7962 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7963 && (TYPE_PRECISION (TREE_TYPE (op0))
7964 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7965 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7966 type, TREE_OPERAND (op0, 0));
7968 return fold_view_convert_expr (type, op0);
7971 tem = fold_negate_expr (loc, arg0);
7973 return fold_convert_loc (loc, type, tem);
7977 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7978 return fold_abs_const (arg0, type);
7979 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7980 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7981 /* Convert fabs((double)float) into (double)fabsf(float). */
7982 else if (TREE_CODE (arg0) == NOP_EXPR
7983 && TREE_CODE (type) == REAL_TYPE)
7985 tree targ0 = strip_float_extensions (arg0);
7987 return fold_convert_loc (loc, type,
7988 fold_build1_loc (loc, ABS_EXPR,
7992 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7993 else if (TREE_CODE (arg0) == ABS_EXPR)
7995 else if (tree_expr_nonnegative_p (arg0))
7998 /* Strip sign ops from argument. */
7999 if (TREE_CODE (type) == REAL_TYPE)
8001 tem = fold_strip_sign_ops (arg0);
8003 return fold_build1_loc (loc, ABS_EXPR, type,
8004 fold_convert_loc (loc, type, tem));
8009 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8010 return fold_convert_loc (loc, type, arg0);
8011 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8013 tree itype = TREE_TYPE (type);
8014 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8015 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8016 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8017 negate_expr (ipart));
8019 if (TREE_CODE (arg0) == COMPLEX_CST)
8021 tree itype = TREE_TYPE (type);
8022 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8023 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8024 return build_complex (type, rpart, negate_expr (ipart));
8026 if (TREE_CODE (arg0) == CONJ_EXPR)
8027 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8031 if (TREE_CODE (arg0) == INTEGER_CST)
8032 return fold_not_const (arg0, type);
8033 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8034 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8035 /* Convert ~ (-A) to A - 1. */
8036 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8037 return fold_build2_loc (loc, MINUS_EXPR, type,
8038 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8039 build_int_cst (type, 1));
8040 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8041 else if (INTEGRAL_TYPE_P (type)
8042 && ((TREE_CODE (arg0) == MINUS_EXPR
8043 && integer_onep (TREE_OPERAND (arg0, 1)))
8044 || (TREE_CODE (arg0) == PLUS_EXPR
8045 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8046 return fold_build1_loc (loc, NEGATE_EXPR, type,
8047 fold_convert_loc (loc, type,
8048 TREE_OPERAND (arg0, 0)));
8049 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8050 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8051 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8052 fold_convert_loc (loc, type,
8053 TREE_OPERAND (arg0, 0)))))
8054 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8055 fold_convert_loc (loc, type,
8056 TREE_OPERAND (arg0, 1)));
8057 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8058 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8059 fold_convert_loc (loc, type,
8060 TREE_OPERAND (arg0, 1)))))
8061 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8062 fold_convert_loc (loc, type,
8063 TREE_OPERAND (arg0, 0)), tem);
8064 /* Perform BIT_NOT_EXPR on each element individually. */
8065 else if (TREE_CODE (arg0) == VECTOR_CST)
8067 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8068 int count = TYPE_VECTOR_SUBPARTS (type), i;
8070 for (i = 0; i < count; i++)
8074 elem = TREE_VALUE (elements);
8075 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8076 if (elem == NULL_TREE)
8078 elements = TREE_CHAIN (elements);
8081 elem = build_int_cst (TREE_TYPE (type), -1);
8082 list = tree_cons (NULL_TREE, elem, list);
8085 return build_vector (type, nreverse (list));
8090 case TRUTH_NOT_EXPR:
8091 /* The argument to invert_truthvalue must have Boolean type. */
8092 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8093 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8095 /* Note that the operand of this must be an int
8096 and its values must be 0 or 1.
8097 ("true" is a fixed value perhaps depending on the language,
8098 but we don't handle values other than 1 correctly yet.) */
8099 tem = fold_truth_not_expr (loc, arg0);
8102 return fold_convert_loc (loc, type, tem);
8105 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8106 return fold_convert_loc (loc, type, arg0);
8107 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8108 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8109 TREE_OPERAND (arg0, 1));
8110 if (TREE_CODE (arg0) == COMPLEX_CST)
8111 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8112 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8114 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8115 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8116 fold_build1_loc (loc, REALPART_EXPR, itype,
8117 TREE_OPERAND (arg0, 0)),
8118 fold_build1_loc (loc, REALPART_EXPR, itype,
8119 TREE_OPERAND (arg0, 1)));
8120 return fold_convert_loc (loc, type, tem);
8122 if (TREE_CODE (arg0) == CONJ_EXPR)
8124 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8125 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8126 TREE_OPERAND (arg0, 0));
8127 return fold_convert_loc (loc, type, tem);
8129 if (TREE_CODE (arg0) == CALL_EXPR)
8131 tree fn = get_callee_fndecl (arg0);
8132 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8133 switch (DECL_FUNCTION_CODE (fn))
8135 CASE_FLT_FN (BUILT_IN_CEXPI):
8136 fn = mathfn_built_in (type, BUILT_IN_COS);
8138 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8148 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8149 return build_zero_cst (type);
8150 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8151 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8152 TREE_OPERAND (arg0, 0));
8153 if (TREE_CODE (arg0) == COMPLEX_CST)
8154 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8155 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8157 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8158 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8159 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8160 TREE_OPERAND (arg0, 0)),
8161 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8162 TREE_OPERAND (arg0, 1)));
8163 return fold_convert_loc (loc, type, tem);
8165 if (TREE_CODE (arg0) == CONJ_EXPR)
8167 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8168 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8169 return fold_convert_loc (loc, type, negate_expr (tem));
8171 if (TREE_CODE (arg0) == CALL_EXPR)
8173 tree fn = get_callee_fndecl (arg0);
8174 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8175 switch (DECL_FUNCTION_CODE (fn))
8177 CASE_FLT_FN (BUILT_IN_CEXPI):
8178 fn = mathfn_built_in (type, BUILT_IN_SIN);
8180 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8190 /* Fold *&X to X if X is an lvalue. */
8191 if (TREE_CODE (op0) == ADDR_EXPR)
8193 tree op00 = TREE_OPERAND (op0, 0);
8194 if ((TREE_CODE (op00) == VAR_DECL
8195 || TREE_CODE (op00) == PARM_DECL
8196 || TREE_CODE (op00) == RESULT_DECL)
8197 && !TREE_READONLY (op00))
8204 } /* switch (code) */
8208 /* If the operation was a conversion do _not_ mark a resulting constant
8209 with TREE_OVERFLOW if the original constant was not. These conversions
8210 have implementation defined behavior and retaining the TREE_OVERFLOW
8211 flag here would confuse later passes such as VRP. */
8213 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8214 tree type, tree op0)
8216 tree res = fold_unary_loc (loc, code, type, op0);
8218 && TREE_CODE (res) == INTEGER_CST
8219 && TREE_CODE (op0) == INTEGER_CST
8220 && CONVERT_EXPR_CODE_P (code))
8221 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8226 /* Fold a binary expression of code CODE and type TYPE with operands
8227 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8228 Return the folded expression if folding is successful. Otherwise,
8229 return NULL_TREE. */
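/* Illustrative example (assuming hypothetical signed ints a and b):
   MIN_EXPR <MAX_EXPR <a, b>, b> folds to plain b, and
   MAX_EXPR <a, MIN_EXPR <a, b>> folds to plain a, because the shared
   operand already bounds the result of the inner MIN/MAX.  */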
8232 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8234 enum tree_code compl_code;
8236 if (code == MIN_EXPR)
8237 compl_code = MAX_EXPR;
8238 else if (code == MAX_EXPR)
8239 compl_code = MIN_EXPR;
8243 /* MIN (MAX (a, b), b) == b. */
8244 if (TREE_CODE (op0) == compl_code
8245 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8246 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8248 /* MIN (MAX (b, a), b) == b. */
8249 if (TREE_CODE (op0) == compl_code
8250 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8251 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8252 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8254 /* MIN (a, MAX (a, b)) == a. */
8255 if (TREE_CODE (op1) == compl_code
8256 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8257 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8258 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8260 /* MIN (a, MAX (b, a)) == a. */
8261 if (TREE_CODE (op1) == compl_code
8262 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8263 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8264 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8269 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8270 by changing CODE to reduce the magnitude of constants involved in
8271 ARG0 of the comparison.
8272 Returns a canonicalized comparison tree if a simplification was
8273 possible, otherwise returns NULL_TREE.
8274 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8275 valid if signed overflow is undefined. */
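/* Worked example (illustrative only, for a hypothetical signed int x
   whose overflow is undefined):

       x + 2 >  y   is rewritten as   x + 1 >= y
       5     <= y   is rewritten as   4     <  y

   The first rewrite is only valid when signed overflow is undefined,
   which is why *STRICT_OVERFLOW_P is set for that form.  */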
8278 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8279 tree arg0, tree arg1,
8280 bool *strict_overflow_p)
8282 enum tree_code code0 = TREE_CODE (arg0);
8283 tree t, cst0 = NULL_TREE;
8287 /* Match A +- CST code arg1 and CST code arg1. We can change the
8288 first form only if overflow is undefined. */
8289 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8290 /* In principle pointers also have undefined overflow behavior,
8291 but that causes problems elsewhere. */
8292 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8293 && (code0 == MINUS_EXPR
8294 || code0 == PLUS_EXPR)
8295 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8296 || code0 == INTEGER_CST))
8299 /* Identify the constant in arg0 and its sign. */
8300 if (code0 == INTEGER_CST)
8303 cst0 = TREE_OPERAND (arg0, 1);
8304 sgn0 = tree_int_cst_sgn (cst0);
8306 /* Overflowed constants and zero will cause problems. */
8307 if (integer_zerop (cst0)
8308 || TREE_OVERFLOW (cst0))
8311 /* See if we can reduce the magnitude of the constant in
8312 arg0 by changing the comparison code. */
8313 if (code0 == INTEGER_CST)
8315 /* CST <= arg1 -> CST-1 < arg1. */
8316 if (code == LE_EXPR && sgn0 == 1)
8318 /* -CST < arg1 -> -CST-1 <= arg1. */
8319 else if (code == LT_EXPR && sgn0 == -1)
8321 /* CST > arg1 -> CST-1 >= arg1. */
8322 else if (code == GT_EXPR && sgn0 == 1)
8324 /* -CST >= arg1 -> -CST-1 > arg1. */
8325 else if (code == GE_EXPR && sgn0 == -1)
8329 /* arg1 code' CST' might be more canonical. */
8334 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8336 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8338 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8339 else if (code == GT_EXPR
8340 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8342 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8343 else if (code == LE_EXPR
8344 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8346 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8347 else if (code == GE_EXPR
8348 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8352 *strict_overflow_p = true;
8355 /* Now build the constant reduced in magnitude. But not if that
8356 would produce one outside of its type's range. */
8357 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8359 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8360 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8362 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8363 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8364 /* We cannot swap the comparison here as that would cause us to
8365 endlessly recurse. */
8368 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8369 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8370 if (code0 != INTEGER_CST)
8371 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8373 /* If swapping might yield a more canonical form, do so. */
8375 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8377 return fold_build2_loc (loc, code, type, t, arg1);
8380 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8381 overflow further. Try to decrease the magnitude of constants involved
8382 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8383 and put sole constants at the second argument position.
8384 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8387 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8388 tree arg0, tree arg1)
8391 bool strict_overflow_p;
8392 const char * const warnmsg = G_("assuming signed overflow does not occur "
8393 "when reducing constant in comparison");
8395 /* Try canonicalization by simplifying arg0. */
8396 strict_overflow_p = false;
8397 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8398 &strict_overflow_p);
8401 if (strict_overflow_p)
8402 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8406 /* Try canonicalization by simplifying arg1 using the swapped
8407 comparison. */
8408 code = swap_tree_comparison (code);
8409 strict_overflow_p = false;
8410 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8411 &strict_overflow_p);
8412 if (t && strict_overflow_p)
8413 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8417 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8418 space. This is used to avoid issuing overflow warnings for
8419 expressions like &p->x which cannot wrap. */
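/* For instance (hypothetical): given "struct s { int a; int x; } *p;",
   the address &p->x is P plus a small constant byte offset; as long as
   that offset stays below the size of the pointed-to object the sum
   cannot wrap, so no overflow warning should be issued for it.  */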
8422 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8424 unsigned HOST_WIDE_INT offset_low, total_low;
8425 HOST_WIDE_INT size, offset_high, total_high;
8427 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8433 if (offset == NULL_TREE)
8438 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8442 offset_low = TREE_INT_CST_LOW (offset);
8443 offset_high = TREE_INT_CST_HIGH (offset);
8446 if (add_double_with_sign (offset_low, offset_high,
8447 bitpos / BITS_PER_UNIT, 0,
8448 &total_low, &total_high,
8452 if (total_high != 0)
8455 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8459 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8460 array. */
8461 if (TREE_CODE (base) == ADDR_EXPR)
8463 HOST_WIDE_INT base_size;
8465 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8466 if (base_size > 0 && size < base_size)
8470 return total_low > (unsigned HOST_WIDE_INT) size;
8473 /* Subroutine of fold_binary. This routine performs all of the
8474 transformations that are common to the equality/inequality
8475 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8476 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8477 fold_binary should call fold_binary instead of this function. Fold a comparison with
8478 tree code CODE and type TYPE with operands OP0 and OP1. Return
8479 the folded comparison or NULL_TREE. */
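/* A small illustration of the first transformation below (assuming a
   hypothetical signed int x whose overflow is undefined):

       x + 10 <  30   folds to   x <  20
       x - 10 == 30   folds to   x == 40

   When the adjusted constant would overflow, the comparison is instead
   folded to a constant true or false result.  */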
8482 fold_comparison (location_t loc, enum tree_code code, tree type,
8485 tree arg0, arg1, tem;
8490 STRIP_SIGN_NOPS (arg0);
8491 STRIP_SIGN_NOPS (arg1);
8493 tem = fold_relational_const (code, type, arg0, arg1);
8494 if (tem != NULL_TREE)
8497 /* If one arg is a real or integer constant, put it last. */
8498 if (tree_swap_operands_p (arg0, arg1, true))
8499 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8501 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8502 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8503 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8504 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8505 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8506 && (TREE_CODE (arg1) == INTEGER_CST
8507 && !TREE_OVERFLOW (arg1)))
8509 tree const1 = TREE_OPERAND (arg0, 1);
8511 tree variable = TREE_OPERAND (arg0, 0);
8514 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8516 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8517 TREE_TYPE (arg1), const2, const1);
8519 /* If the constant operation overflowed this can be
8520 simplified as a comparison against INT_MAX/INT_MIN. */
8521 if (TREE_CODE (lhs) == INTEGER_CST
8522 && TREE_OVERFLOW (lhs))
8524 int const1_sgn = tree_int_cst_sgn (const1);
8525 enum tree_code code2 = code;
8527 /* Get the sign the constant would have on the lhs if the
8528 operation were written as VARIABLE + CONST1. */
8529 if (TREE_CODE (arg0) == MINUS_EXPR)
8530 const1_sgn = -const1_sgn;
8532 /* The sign of the constant determines if we overflowed
8533 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8534 Canonicalize to the INT_MIN overflow by swapping the comparison
8535 if necessary. */
8536 if (const1_sgn == -1)
8537 code2 = swap_tree_comparison (code);
8539 /* We now can look at the canonicalized case
8540 VARIABLE + 1 CODE2 INT_MIN
8541 and decide on the result. */
8542 if (code2 == LT_EXPR
8544 || code2 == EQ_EXPR)
8545 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8546 else if (code2 == NE_EXPR
8548 || code2 == GT_EXPR)
8549 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8552 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8553 && (TREE_CODE (lhs) != INTEGER_CST
8554 || !TREE_OVERFLOW (lhs)))
8556 if (code != EQ_EXPR && code != NE_EXPR)
8557 fold_overflow_warning ("assuming signed overflow does not occur "
8558 "when changing X +- C1 cmp C2 to "
8560 WARN_STRICT_OVERFLOW_COMPARISON);
8561 return fold_build2_loc (loc, code, type, variable, lhs);
8565 /* For comparisons of pointers we can decompose them into a compile-time
8566 comparison of the base objects and the offsets into the object.
8567 This requires at least one operand being an ADDR_EXPR or a
8568 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8569 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8570 && (TREE_CODE (arg0) == ADDR_EXPR
8571 || TREE_CODE (arg1) == ADDR_EXPR
8572 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8573 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8575 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8576 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8577 enum machine_mode mode;
8578 int volatilep, unsignedp;
8579 bool indirect_base0 = false, indirect_base1 = false;
8581 /* Get base and offset for the access. Strip ADDR_EXPR for
8582 get_inner_reference, but put it back by stripping INDIRECT_REF
8583 off the base object if possible. indirect_baseN will be true
8584 if baseN is not an address but refers to the object itself. */
8586 if (TREE_CODE (arg0) == ADDR_EXPR)
8588 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8589 &bitsize, &bitpos0, &offset0, &mode,
8590 &unsignedp, &volatilep, false);
8591 if (TREE_CODE (base0) == INDIRECT_REF)
8592 base0 = TREE_OPERAND (base0, 0);
8594 indirect_base0 = true;
8596 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8598 base0 = TREE_OPERAND (arg0, 0);
8599 STRIP_SIGN_NOPS (base0);
8600 if (TREE_CODE (base0) == ADDR_EXPR)
8602 base0 = TREE_OPERAND (base0, 0);
8603 indirect_base0 = true;
8605 offset0 = TREE_OPERAND (arg0, 1);
8609 if (TREE_CODE (arg1) == ADDR_EXPR)
8611 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8612 &bitsize, &bitpos1, &offset1, &mode,
8613 &unsignedp, &volatilep, false);
8614 if (TREE_CODE (base1) == INDIRECT_REF)
8615 base1 = TREE_OPERAND (base1, 0);
8617 indirect_base1 = true;
8619 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8621 base1 = TREE_OPERAND (arg1, 0);
8622 STRIP_SIGN_NOPS (base1);
8623 if (TREE_CODE (base1) == ADDR_EXPR)
8625 base1 = TREE_OPERAND (base1, 0);
8626 indirect_base1 = true;
8628 offset1 = TREE_OPERAND (arg1, 1);
8631 /* A local variable can never be pointed to by
8632 the default SSA name of an incoming parameter. */
8633 if ((TREE_CODE (arg0) == ADDR_EXPR
8635 && TREE_CODE (base0) == VAR_DECL
8636 && auto_var_in_fn_p (base0, current_function_decl)
8638 && TREE_CODE (base1) == SSA_NAME
8639 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8640 && SSA_NAME_IS_DEFAULT_DEF (base1))
8641 || (TREE_CODE (arg1) == ADDR_EXPR
8643 && TREE_CODE (base1) == VAR_DECL
8644 && auto_var_in_fn_p (base1, current_function_decl)
8646 && TREE_CODE (base0) == SSA_NAME
8647 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8648 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8650 if (code == NE_EXPR)
8651 return constant_boolean_node (1, type);
8652 else if (code == EQ_EXPR)
8653 return constant_boolean_node (0, type);
8655 /* If we have equivalent bases we might be able to simplify. */
8656 else if (indirect_base0 == indirect_base1
8657 && operand_equal_p (base0, base1, 0))
8659 /* We can fold this expression to a constant if the non-constant
8660 offset parts are equal. */
8661 if ((offset0 == offset1
8662 || (offset0 && offset1
8663 && operand_equal_p (offset0, offset1, 0)))
8666 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8671 && bitpos0 != bitpos1
8672 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8673 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8674 fold_overflow_warning (("assuming pointer wraparound does not "
8675 "occur when comparing P +- C1 with "
8677 WARN_STRICT_OVERFLOW_CONDITIONAL);
8682 return constant_boolean_node (bitpos0 == bitpos1, type);
8684 return constant_boolean_node (bitpos0 != bitpos1, type);
8686 return constant_boolean_node (bitpos0 < bitpos1, type);
8688 return constant_boolean_node (bitpos0 <= bitpos1, type);
8690 return constant_boolean_node (bitpos0 >= bitpos1, type);
8692 return constant_boolean_node (bitpos0 > bitpos1, type);
8696 /* We can simplify the comparison to a comparison of the variable
8697 offset parts if the constant offset parts are equal.
8698 Be careful to use signed size type here because otherwise we
8699 mess with array offsets in the wrong way. This is possible
8700 because pointer arithmetic is restricted to remain within an
8701 object and overflow on pointer differences is undefined as of
8702 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
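/* Illustration (hypothetical): for "int a[16]; size_t i, j;", the
   comparison "&a[i] < &a[j]" has equal bases and equal constant parts,
   so (given that pointer overflow is undefined here) it reduces to a
   signed (ssizetype) comparison of the two variable offsets, roughly
   "i * sizeof (int) < j * sizeof (int)".  */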
8703 else if (bitpos0 == bitpos1
8704 && ((code == EQ_EXPR || code == NE_EXPR)
8705 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8707 /* By converting to signed size type we cover middle-end pointer
8708 arithmetic which operates on unsigned pointer types of size
8709 type size and ARRAY_REF offsets which are properly sign or
8710 zero extended from their type in case it is narrower than
8711 sizetype. */
8712 if (offset0 == NULL_TREE)
8713 offset0 = build_int_cst (ssizetype, 0);
8715 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8716 if (offset1 == NULL_TREE)
8717 offset1 = build_int_cst (ssizetype, 0);
8719 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8723 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8724 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8725 fold_overflow_warning (("assuming pointer wraparound does not "
8726 "occur when comparing P +- C1 with "
8728 WARN_STRICT_OVERFLOW_COMPARISON);
8730 return fold_build2_loc (loc, code, type, offset0, offset1);
8733 /* For non-equal bases we can simplify if they are addresses
8734 of local binding decls or constants. */
8735 else if (indirect_base0 && indirect_base1
8736 /* We know that !operand_equal_p (base0, base1, 0)
8737 because the if condition was false. But make
8738 sure the two decls are not the same. */
8740 && TREE_CODE (arg0) == ADDR_EXPR
8741 && TREE_CODE (arg1) == ADDR_EXPR
8742 && (((TREE_CODE (base0) == VAR_DECL
8743 || TREE_CODE (base0) == PARM_DECL)
8744 && (targetm.binds_local_p (base0)
8745 || CONSTANT_CLASS_P (base1)))
8746 || CONSTANT_CLASS_P (base0))
8747 && (((TREE_CODE (base1) == VAR_DECL
8748 || TREE_CODE (base1) == PARM_DECL)
8749 && (targetm.binds_local_p (base1)
8750 || CONSTANT_CLASS_P (base0)))
8751 || CONSTANT_CLASS_P (base1)))
8753 if (code == EQ_EXPR)
8754 return omit_two_operands_loc (loc, type, boolean_false_node,
8756 else if (code == NE_EXPR)
8757 return omit_two_operands_loc (loc, type, boolean_true_node,
8760 /* For equal offsets we can simplify to a comparison of the
8761 base addresses. */
8762 else if (bitpos0 == bitpos1
8764 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8766 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8767 && ((offset0 == offset1)
8768 || (offset0 && offset1
8769 && operand_equal_p (offset0, offset1, 0))))
8772 base0 = build_fold_addr_expr_loc (loc, base0);
8774 base1 = build_fold_addr_expr_loc (loc, base1);
8775 return fold_build2_loc (loc, code, type, base0, base1);
8779 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8780 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8781 the resulting offset is smaller in absolute value than the
8782 original one. */
8783 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8784 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8785 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8786 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8787 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8788 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8789 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8791 tree const1 = TREE_OPERAND (arg0, 1);
8792 tree const2 = TREE_OPERAND (arg1, 1);
8793 tree variable1 = TREE_OPERAND (arg0, 0);
8794 tree variable2 = TREE_OPERAND (arg1, 0);
8796 const char * const warnmsg = G_("assuming signed overflow does not "
8797 "occur when combining constants around "
8800 /* Put the constant on the side where it doesn't overflow and is
8801 of lower absolute value than before. */
8802 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8803 ? MINUS_EXPR : PLUS_EXPR,
8805 if (!TREE_OVERFLOW (cst)
8806 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8808 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8809 return fold_build2_loc (loc, code, type,
8811 fold_build2_loc (loc,
8812 TREE_CODE (arg1), TREE_TYPE (arg1),
8816 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8817 ? MINUS_EXPR : PLUS_EXPR,
8819 if (!TREE_OVERFLOW (cst)
8820 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8822 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8823 return fold_build2_loc (loc, code, type,
8824 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8830 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8831 signed arithmetic case. That form is created by the compiler
8832 often enough for folding it to be of value. One example is in
8833 computing loop trip counts after Operator Strength Reduction. */
8834 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8835 && TREE_CODE (arg0) == MULT_EXPR
8836 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8837 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8838 && integer_zerop (arg1))
8840 tree const1 = TREE_OPERAND (arg0, 1);
8841 tree const2 = arg1; /* zero */
8842 tree variable1 = TREE_OPERAND (arg0, 0);
8843 enum tree_code cmp_code = code;
8845 /* Handle unfolded multiplication by zero. */
8846 if (integer_zerop (const1))
8847 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8849 fold_overflow_warning (("assuming signed overflow does not occur when "
8850 "eliminating multiplication in comparison "
8852 WARN_STRICT_OVERFLOW_COMPARISON);
8854 /* If const1 is negative we swap the sense of the comparison. */
8855 if (tree_int_cst_sgn (const1) < 0)
8856 cmp_code = swap_tree_comparison (cmp_code);
8858 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
8861 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
8865 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8867 tree targ0 = strip_float_extensions (arg0);
8868 tree targ1 = strip_float_extensions (arg1);
8869 tree newtype = TREE_TYPE (targ0);
8871 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8872 newtype = TREE_TYPE (targ1);
8874 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8875 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8876 return fold_build2_loc (loc, code, type,
8877 fold_convert_loc (loc, newtype, targ0),
8878 fold_convert_loc (loc, newtype, targ1));
8880 /* (-a) CMP (-b) -> b CMP a */
8881 if (TREE_CODE (arg0) == NEGATE_EXPR
8882 && TREE_CODE (arg1) == NEGATE_EXPR)
8883 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8884 TREE_OPERAND (arg0, 0));
8886 if (TREE_CODE (arg1) == REAL_CST)
8888 REAL_VALUE_TYPE cst;
8889 cst = TREE_REAL_CST (arg1);
8891 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8892 if (TREE_CODE (arg0) == NEGATE_EXPR)
8893 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8894 TREE_OPERAND (arg0, 0),
8895 build_real (TREE_TYPE (arg1),
8896 real_value_negate (&cst)));
8898 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8899 /* a CMP (-0) -> a CMP 0 */
8900 if (REAL_VALUE_MINUS_ZERO (cst))
8901 return fold_build2_loc (loc, code, type, arg0,
8902 build_real (TREE_TYPE (arg1), dconst0));
8904 /* x != NaN is always true, other ops are always false. */
8905 if (REAL_VALUE_ISNAN (cst)
8906 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8908 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8909 return omit_one_operand_loc (loc, type, tem, arg0);
8912 /* Fold comparisons against infinity. */
8913 if (REAL_VALUE_ISINF (cst)
8914 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
8916 tem = fold_inf_compare (loc, code, type, arg0, arg1);
8917 if (tem != NULL_TREE)
8922 /* If this is a comparison of a real constant with a PLUS_EXPR
8923 or a MINUS_EXPR of a real constant, we can convert it into a
8924 comparison with a revised real constant as long as no overflow
8925 occurs when unsafe_math_optimizations are enabled. */
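/* For example (illustrative, requires -funsafe-math-optimizations):
   "x + 1.5 < 4.0" can become "x < 2.5", since the revised constant
   4.0 - 1.5 is computed exactly and does not overflow.  */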
8926 if (flag_unsafe_math_optimizations
8927 && TREE_CODE (arg1) == REAL_CST
8928 && (TREE_CODE (arg0) == PLUS_EXPR
8929 || TREE_CODE (arg0) == MINUS_EXPR)
8930 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8931 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8932 ? MINUS_EXPR : PLUS_EXPR,
8933 arg1, TREE_OPERAND (arg0, 1)))
8934 && !TREE_OVERFLOW (tem))
8935 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8937 /* Likewise, we can simplify a comparison of a real constant with
8938 a MINUS_EXPR whose first operand is also a real constant, i.e.
8939 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8940 floating-point types only if -fassociative-math is set. */
8941 if (flag_associative_math
8942 && TREE_CODE (arg1) == REAL_CST
8943 && TREE_CODE (arg0) == MINUS_EXPR
8944 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8945 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8947 && !TREE_OVERFLOW (tem))
8948 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8949 TREE_OPERAND (arg0, 1), tem);
8951 /* Fold comparisons against built-in math functions. */
8952 if (TREE_CODE (arg1) == REAL_CST
8953 && flag_unsafe_math_optimizations
8954 && ! flag_errno_math)
8956 enum built_in_function fcode = builtin_mathfn_code (arg0);
8958 if (fcode != END_BUILTINS)
8960 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
8961 if (tem != NULL_TREE)
8967 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8968 && CONVERT_EXPR_P (arg0))
8970 /* If we are widening one operand of an integer comparison,
8971 see if the other operand is similarly being widened. Perhaps we
8972 can do the comparison in the narrower type. */
8973 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
8977 /* Or if we are changing signedness. */
8978 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
8983 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8984 constant, we can simplify it. */
8985 if (TREE_CODE (arg1) == INTEGER_CST
8986 && (TREE_CODE (arg0) == MIN_EXPR
8987 || TREE_CODE (arg0) == MAX_EXPR)
8988 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8990 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8995 /* Simplify comparison of something with itself. (For IEEE
8996 floating-point, we can only do some of these simplifications.) */
8997 if (operand_equal_p (arg0, arg1, 0))
9002 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9003 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9004 return constant_boolean_node (1, type);
9009 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9010 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9011 return constant_boolean_node (1, type);
9012 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9015 /* For NE, we can only do this simplification for integer types
9016 or when we don't honor IEEE floating point NaNs. */
9017 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9018 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9020 /* ... fall through ... */
9023 return constant_boolean_node (0, type);
9029 /* If we are comparing an expression that just has comparisons
9030 of two integer values, arithmetic expressions of those comparisons,
9031 and constants, we can simplify it. There are only three cases
9032 to check: the two values can either be equal, the first can be
9033 greater, or the second can be greater. Fold the expression for
9034 those three values. Since each value must be 0 or 1, we have
9035 eight possibilities, each of which corresponds to the constant 0
9036 or 1 or one of the six possible comparisons.
9038 This handles common cases like (a > b) == 0 but also handles
9039 expressions like ((x > y) - (y > x)) > 0, which supposedly
9040 occur in macroized code. */
9042 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9044 tree cval1 = 0, cval2 = 0;
9047 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9048 /* Don't handle degenerate cases here; they should already
9049 have been handled anyway. */
9050 && cval1 != 0 && cval2 != 0
9051 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9052 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9053 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9054 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9055 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9056 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9057 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9059 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9060 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9062 /* We can't just pass T to eval_subst in case cval1 or cval2
9063 was the same as ARG1. */
9066 = fold_build2_loc (loc, code, type,
9067 eval_subst (loc, arg0, cval1, maxval,
9071 = fold_build2_loc (loc, code, type,
9072 eval_subst (loc, arg0, cval1, maxval,
9076 = fold_build2_loc (loc, code, type,
9077 eval_subst (loc, arg0, cval1, minval,
9081 /* All three of these results should be 0 or 1. Confirm they are.
9082 Then use those values to select the proper code to use. */
9084 if (TREE_CODE (high_result) == INTEGER_CST
9085 && TREE_CODE (equal_result) == INTEGER_CST
9086 && TREE_CODE (low_result) == INTEGER_CST)
9088 /* Make a 3-bit mask with the high-order bit being the
9089 value for `>', the next for '=', and the low for '<'. */
9090 switch ((integer_onep (high_result) * 4)
9091 + (integer_onep (equal_result) * 2)
9092 + integer_onep (low_result))
9096 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9117 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9122 tem = save_expr (build2 (code, type, cval1, cval2));
9123 SET_EXPR_LOCATION (tem, loc);
9126 return fold_build2_loc (loc, code, type, cval1, cval2);
9131 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9132 into a single range test. */
9133 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9134 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9135 && TREE_CODE (arg1) == INTEGER_CST
9136 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9137 && !integer_zerop (TREE_OPERAND (arg0, 1))
9138 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9139 && !TREE_OVERFLOW (arg1))
9141 tem = fold_div_compare (loc, code, type, arg0, arg1);
9142 if (tem != NULL_TREE)
9146 /* Fold ~X op ~Y as Y op X. */
9147 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9148 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9150 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9151 return fold_build2_loc (loc, code, type,
9152 fold_convert_loc (loc, cmp_type,
9153 TREE_OPERAND (arg1, 0)),
9154 TREE_OPERAND (arg0, 0));
9157 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9158 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9159 && TREE_CODE (arg1) == INTEGER_CST)
9161 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9162 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9163 TREE_OPERAND (arg0, 0),
9164 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9165 fold_convert_loc (loc, cmp_type, arg1)));
9172 /* Subroutine of fold_binary. Optimize complex multiplications of the
9173 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9174 argument EXPR represents the expression "z" of type TYPE. */
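/* Sketch of the identity used here (assuming a hypothetical
   "_Complex double z"):

       z * conj (z) == creal (z)*creal (z) + cimag (z)*cimag (z)

   i.e. the product is purely real, so the result is rebuilt as
   COMPLEX_EXPR <re*re + im*im, 0>.  */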
9177 fold_mult_zconjz (location_t loc, tree type, tree expr)
9179 tree itype = TREE_TYPE (type);
9180 tree rpart, ipart, tem;
9182 if (TREE_CODE (expr) == COMPLEX_EXPR)
9184 rpart = TREE_OPERAND (expr, 0);
9185 ipart = TREE_OPERAND (expr, 1);
9187 else if (TREE_CODE (expr) == COMPLEX_CST)
9189 rpart = TREE_REALPART (expr);
9190 ipart = TREE_IMAGPART (expr);
9194 expr = save_expr (expr);
9195 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9196 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9199 rpart = save_expr (rpart);
9200 ipart = save_expr (ipart);
9201 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9202 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9203 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9204 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9205 build_zero_cst (itype));
9209 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9210 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9211 guarantees that P and N have the same least significant log2(M) bits.
9212 N is not otherwise constrained. In particular, N is not normalized to
9213 0 <= N < M as is common. In general, the precise value of P is unknown.
9214 M is chosen as large as possible such that constant N can be determined.
9216 Returns M and sets *RESIDUE to N.
9218 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9219 account. This is not always possible due to PR 35705.
9220 */
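/* Hypothetical example: for "static char buf[32] __attribute__ ((aligned (16)));",
   the expression "&buf[0] + 5" yields modulus M == 16 and residue N == 5,
   i.e. the pointer value is known to be congruent to 5 modulo 16.  */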
9222 static unsigned HOST_WIDE_INT
9223 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9224 bool allow_func_align)
9226 enum tree_code code;
9230 code = TREE_CODE (expr);
9231 if (code == ADDR_EXPR)
9233 unsigned int bitalign;
9234 bitalign = get_object_alignment_1 (TREE_OPERAND (expr, 0), residue);
9235 *residue /= BITS_PER_UNIT;
9236 return bitalign / BITS_PER_UNIT;
9238 else if (code == POINTER_PLUS_EXPR)
9241 unsigned HOST_WIDE_INT modulus;
9242 enum tree_code inner_code;
9244 op0 = TREE_OPERAND (expr, 0);
9246 modulus = get_pointer_modulus_and_residue (op0, residue,
9249 op1 = TREE_OPERAND (expr, 1);
9251 inner_code = TREE_CODE (op1);
9252 if (inner_code == INTEGER_CST)
9254 *residue += TREE_INT_CST_LOW (op1);
9257 else if (inner_code == MULT_EXPR)
9259 op1 = TREE_OPERAND (op1, 1);
9260 if (TREE_CODE (op1) == INTEGER_CST)
9262 unsigned HOST_WIDE_INT align;
9264 /* Compute the greatest power-of-2 divisor of op1. */
9265 align = TREE_INT_CST_LOW (op1);
9268 /* If align is non-zero and less than *modulus, replace
9269 *modulus with align. If align is 0, then either op1 is 0
9270 or the greatest power-of-2 divisor of op1 doesn't fit in an
9271 unsigned HOST_WIDE_INT. In either case, no additional
9272 constraint is imposed. */
9274 modulus = MIN (modulus, align);
9281 /* If we get here, we were unable to determine anything useful about the
9282 expression. */
9287 /* Fold a binary expression of code CODE and type TYPE with operands
9288 OP0 and OP1. LOC is the location of the resulting expression.
9289 Return the folded expression if folding is successful. Otherwise,
9290 return NULL_TREE. */
9293 fold_binary_loc (location_t loc,
9294 enum tree_code code, tree type, tree op0, tree op1)
9296 enum tree_code_class kind = TREE_CODE_CLASS (code);
9297 tree arg0, arg1, tem;
9298 tree t1 = NULL_TREE;
9299 bool strict_overflow_p;
9301 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9302 && TREE_CODE_LENGTH (code) == 2
9304 && op1 != NULL_TREE);
9309 /* Strip any conversions that don't change the mode. This is
9310 safe for every expression, except for a comparison expression
9311 because its signedness is derived from its operands. So, in
9312 the latter case, only strip conversions that don't change the
9313 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9314 preserved.
9316 Note that this is done as an internal manipulation within the
9317 constant folder, in order to find the simplest representation
9318 of the arguments so that their form can be studied. In any
9319 case, the appropriate type conversions should be put back in
9320 the tree that will get out of the constant folder. */
9322 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9324 STRIP_SIGN_NOPS (arg0);
9325 STRIP_SIGN_NOPS (arg1);
9333 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9334 constant but we can't do arithmetic on them. */
9335 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9336 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9337 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9338 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9339 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9340 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9342 if (kind == tcc_binary)
9344 /* Make sure type and arg0 have the same saturating flag. */
9345 gcc_assert (TYPE_SATURATING (type)
9346 == TYPE_SATURATING (TREE_TYPE (arg0)));
9347 tem = const_binop (code, arg0, arg1);
9349 else if (kind == tcc_comparison)
9350 tem = fold_relational_const (code, type, arg0, arg1);
9354 if (tem != NULL_TREE)
9356 if (TREE_TYPE (tem) != type)
9357 tem = fold_convert_loc (loc, type, tem);
9362 /* If this is a commutative operation, and ARG0 is a constant, move it
9363 to ARG1 to reduce the number of tests below. */
9364 if (commutative_tree_code (code)
9365 && tree_swap_operands_p (arg0, arg1, true))
9366 return fold_build2_loc (loc, code, type, op1, op0);
9368 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9370 First check for cases where an arithmetic operation is applied to a
9371 compound, conditional, or comparison operation. Push the arithmetic
9372 operation inside the compound or conditional to see if any folding
9373 can then be done. Convert comparison to conditional for this purpose.
9374 This also optimizes non-constant cases that used to be done in
9375 expr.c.
9377 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9378 one of the operands is a comparison and the other is a comparison, a
9379 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9380 code below would make the expression more complex. Change it to a
9381 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9382 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
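/* As an illustration (not from the original sources): for hypothetical
   ints a, b, c, d the bitwise form "(a < b) & (c < d)" is rewritten into
   TRUTH_AND_EXPR <a < b, c < d>, and "(a < b) == (c < d)" becomes the
   inversion of TRUTH_XOR_EXPR <a < b, c < d>, which the truth-value
   folders handle better than the bitwise codes.  */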
9384 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9385 || code == EQ_EXPR || code == NE_EXPR)
9386 && ((truth_value_p (TREE_CODE (arg0))
9387 && (truth_value_p (TREE_CODE (arg1))
9388 || (TREE_CODE (arg1) == BIT_AND_EXPR
9389 && integer_onep (TREE_OPERAND (arg1, 1)))))
9390 || (truth_value_p (TREE_CODE (arg1))
9391 && (truth_value_p (TREE_CODE (arg0))
9392 || (TREE_CODE (arg0) == BIT_AND_EXPR
9393 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9395 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9396 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9399 fold_convert_loc (loc, boolean_type_node, arg0),
9400 fold_convert_loc (loc, boolean_type_node, arg1));
9402 if (code == EQ_EXPR)
9403 tem = invert_truthvalue_loc (loc, tem);
9405 return fold_convert_loc (loc, type, tem);
9408 if (TREE_CODE_CLASS (code) == tcc_binary
9409 || TREE_CODE_CLASS (code) == tcc_comparison)
9411 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9413 tem = fold_build2_loc (loc, code, type,
9414 fold_convert_loc (loc, TREE_TYPE (op0),
9415 TREE_OPERAND (arg0, 1)), op1);
9416 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9419 if (TREE_CODE (arg1) == COMPOUND_EXPR
9420 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9422 tem = fold_build2_loc (loc, code, type, op0,
9423 fold_convert_loc (loc, TREE_TYPE (op1),
9424 TREE_OPERAND (arg1, 1)));
9425 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9429 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9431 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9433 /*cond_first_p=*/1);
9434 if (tem != NULL_TREE)
9438 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9440 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9442 /*cond_first_p=*/0);
9443 if (tem != NULL_TREE)
9451 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9452 if (TREE_CODE (arg0) == ADDR_EXPR
9453 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9455 tree iref = TREE_OPERAND (arg0, 0);
9456 return fold_build2 (MEM_REF, type,
9457 TREE_OPERAND (iref, 0),
9458 int_const_binop (PLUS_EXPR, arg1,
9459 TREE_OPERAND (iref, 1), 0));
9462 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9463 if (TREE_CODE (arg0) == ADDR_EXPR
9464 && handled_component_p (TREE_OPERAND (arg0, 0)))
9467 HOST_WIDE_INT coffset;
9468 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9472 return fold_build2 (MEM_REF, type,
9473 build_fold_addr_expr (base),
9474 int_const_binop (PLUS_EXPR, arg1,
9475 size_int (coffset), 0));
9480 case POINTER_PLUS_EXPR:
9481 /* 0 +p index -> (type)index */
9482 if (integer_zerop (arg0))
9483 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9485 /* PTR +p 0 -> PTR */
9486 if (integer_zerop (arg1))
9487 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9489 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9490 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9491 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9492 return fold_convert_loc (loc, type,
9493 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9494 fold_convert_loc (loc, sizetype,
9496 fold_convert_loc (loc, sizetype,
9499 /* index +p PTR -> PTR +p index */
9500 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9501 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9502 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9503 fold_convert_loc (loc, type, arg1),
9504 fold_convert_loc (loc, sizetype, arg0));
9506 /* (PTR +p B) +p A -> PTR +p (B + A) */
9507 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9510 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9511 tree arg00 = TREE_OPERAND (arg0, 0);
9512 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9513 arg01, fold_convert_loc (loc, sizetype, arg1));
9514 return fold_convert_loc (loc, type,
9515 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9520 /* PTR_CST +p CST -> CST1 */
9521 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9522 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9523 fold_convert_loc (loc, type, arg1));
9525 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9526 of the array. The loop optimizer sometimes produces this type of
9527 expression. */
9528 if (TREE_CODE (arg0) == ADDR_EXPR)
9530 tem = try_move_mult_to_index (loc, arg0,
9531 fold_convert_loc (loc, sizetype, arg1));
9533 return fold_convert_loc (loc, type, tem);
9539 /* A + (-B) -> A - B */
9540 if (TREE_CODE (arg1) == NEGATE_EXPR)
9541 return fold_build2_loc (loc, MINUS_EXPR, type,
9542 fold_convert_loc (loc, type, arg0),
9543 fold_convert_loc (loc, type,
9544 TREE_OPERAND (arg1, 0)));
9545 /* (-A) + B -> B - A */
9546 if (TREE_CODE (arg0) == NEGATE_EXPR
9547 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9548 return fold_build2_loc (loc, MINUS_EXPR, type,
9549 fold_convert_loc (loc, type, arg1),
9550 fold_convert_loc (loc, type,
9551 TREE_OPERAND (arg0, 0)));
9553 if (INTEGRAL_TYPE_P (type))
9555 /* Convert ~A + 1 to -A. */
9556 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9557 && integer_onep (arg1))
9558 return fold_build1_loc (loc, NEGATE_EXPR, type,
9559 fold_convert_loc (loc, type,
9560 TREE_OPERAND (arg0, 0)));
9562 /* ~X + X is -1. */
9563 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9564 && !TYPE_OVERFLOW_TRAPS (type))
9566 tree tem = TREE_OPERAND (arg0, 0);
9569 if (operand_equal_p (tem, arg1, 0))
9571 t1 = build_int_cst_type (type, -1);
9572 return omit_one_operand_loc (loc, type, t1, arg1);
9576 /* X + ~X is -1. */
9577 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9578 && !TYPE_OVERFLOW_TRAPS (type))
9580 tree tem = TREE_OPERAND (arg1, 0);
9583 if (operand_equal_p (arg0, tem, 0))
9585 t1 = build_int_cst_type (type, -1);
9586 return omit_one_operand_loc (loc, type, t1, arg0);
9590 /* X + (X / CST) * -CST is X % CST. */
9591 if (TREE_CODE (arg1) == MULT_EXPR
9592 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9593 && operand_equal_p (arg0,
9594 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9596 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9597 tree cst1 = TREE_OPERAND (arg1, 1);
9598 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9600 if (sum && integer_zerop (sum))
9601 return fold_convert_loc (loc, type,
9602 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9603 TREE_TYPE (arg0), arg0,
9608 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9609 same or one. Make sure type is not saturating.
9610 fold_plusminus_mult_expr will re-associate. */
9611 if ((TREE_CODE (arg0) == MULT_EXPR
9612 || TREE_CODE (arg1) == MULT_EXPR)
9613 && !TYPE_SATURATING (type)
9614 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9616 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9621 if (! FLOAT_TYPE_P (type))
9623 if (integer_zerop (arg1))
9624 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9626 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9627 with a constant, and the two constants have no bits in common,
9628 we should treat this as a BIT_IOR_EXPR since this may produce more
9629 simplifications. */
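/* E.g. (hypothetical) "(x & 0x0f) + (y & 0xf0)": the two masks share no
   bits, so the addition can never carry and is handled as a
   BIT_IOR_EXPR instead.  */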
9630 if (TREE_CODE (arg0) == BIT_AND_EXPR
9631 && TREE_CODE (arg1) == BIT_AND_EXPR
9632 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9633 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9634 && integer_zerop (const_binop (BIT_AND_EXPR,
9635 TREE_OPERAND (arg0, 1),
9636 TREE_OPERAND (arg1, 1))))
9638 code = BIT_IOR_EXPR;
9642 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9643 (plus (plus (mult) (mult)) (foo)) so that we can
9644 take advantage of the factoring cases below. */
9645 if (((TREE_CODE (arg0) == PLUS_EXPR
9646 || TREE_CODE (arg0) == MINUS_EXPR)
9647 && TREE_CODE (arg1) == MULT_EXPR)
9648 || ((TREE_CODE (arg1) == PLUS_EXPR
9649 || TREE_CODE (arg1) == MINUS_EXPR)
9650 && TREE_CODE (arg0) == MULT_EXPR))
9652 tree parg0, parg1, parg, marg;
9653 enum tree_code pcode;
9655 if (TREE_CODE (arg1) == MULT_EXPR)
9656 parg = arg0, marg = arg1;
9658 parg = arg1, marg = arg0;
9659 pcode = TREE_CODE (parg);
9660 parg0 = TREE_OPERAND (parg, 0);
9661 parg1 = TREE_OPERAND (parg, 1);
9665 if (TREE_CODE (parg0) == MULT_EXPR
9666 && TREE_CODE (parg1) != MULT_EXPR)
9667 return fold_build2_loc (loc, pcode, type,
9668 fold_build2_loc (loc, PLUS_EXPR, type,
9669 fold_convert_loc (loc, type,
9671 fold_convert_loc (loc, type,
9673 fold_convert_loc (loc, type, parg1));
9674 if (TREE_CODE (parg0) != MULT_EXPR
9675 && TREE_CODE (parg1) == MULT_EXPR)
9677 fold_build2_loc (loc, PLUS_EXPR, type,
9678 fold_convert_loc (loc, type, parg0),
9679 fold_build2_loc (loc, pcode, type,
9680 fold_convert_loc (loc, type, marg),
9681 fold_convert_loc (loc, type,
9687 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9688 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9689 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9691 /* Likewise if the operands are reversed. */
9692 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9693 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9695 /* Convert X + -C into X - C. */
9696 if (TREE_CODE (arg1) == REAL_CST
9697 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9699 tem = fold_negate_const (arg1, type);
9700 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9701 return fold_build2_loc (loc, MINUS_EXPR, type,
9702 fold_convert_loc (loc, type, arg0),
9703 fold_convert_loc (loc, type, tem));
9706 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9707 to __complex__ ( x, y ). This is not the same for SNaNs or
9708 if signed zeros are involved. */
9709 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9710 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9711 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9713 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9714 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9715 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9716 bool arg0rz = false, arg0iz = false;
9717 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9718 || (arg0i && (arg0iz = real_zerop (arg0i))))
9720 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9721 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9722 if (arg0rz && arg1i && real_zerop (arg1i))
9724 tree rp = arg1r ? arg1r
9725 : build1 (REALPART_EXPR, rtype, arg1);
9726 tree ip = arg0i ? arg0i
9727 : build1 (IMAGPART_EXPR, rtype, arg0);
9728 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9730 else if (arg0iz && arg1r && real_zerop (arg1r))
9732 tree rp = arg0r ? arg0r
9733 : build1 (REALPART_EXPR, rtype, arg0);
9734 tree ip = arg1i ? arg1i
9735 : build1 (IMAGPART_EXPR, rtype, arg1);
9736 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9741 if (flag_unsafe_math_optimizations
9742 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9743 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9744 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9747 /* Convert x+x into x*2.0. */
9748 if (operand_equal_p (arg0, arg1, 0)
9749 && SCALAR_FLOAT_TYPE_P (type))
9750 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9751 build_real (type, dconst2));
9753 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9754 We associate floats only if the user has specified
9755 -fassociative-math. */
9756 if (flag_associative_math
9757 && TREE_CODE (arg1) == PLUS_EXPR
9758 && TREE_CODE (arg0) != MULT_EXPR)
9760 tree tree10 = TREE_OPERAND (arg1, 0);
9761 tree tree11 = TREE_OPERAND (arg1, 1);
9762 if (TREE_CODE (tree11) == MULT_EXPR
9763 && TREE_CODE (tree10) == MULT_EXPR)
9766 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9767 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9770 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9771 We associate floats only if the user has specified
9772 -fassociative-math. */
9773 if (flag_associative_math
9774 && TREE_CODE (arg0) == PLUS_EXPR
9775 && TREE_CODE (arg1) != MULT_EXPR)
9777 tree tree00 = TREE_OPERAND (arg0, 0);
9778 tree tree01 = TREE_OPERAND (arg0, 1);
9779 if (TREE_CODE (tree01) == MULT_EXPR
9780 && TREE_CODE (tree00) == MULT_EXPR)
9783 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9784 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9790 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9791 is a rotate of A by C1 bits. */
9792 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9793 is a rotate of A by B bits. */
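/* Concrete instance (illustrative): for a hypothetical 32-bit
   "unsigned int x", "(x << 3) + (x >> 29)" is recognized as
   LROTATE_EXPR <x, 3>, and "(x << n) + (x >> (32 - n))" is likewise
   turned into a rotate by n.  */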
9795 enum tree_code code0, code1;
9797 code0 = TREE_CODE (arg0);
9798 code1 = TREE_CODE (arg1);
9799 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9800 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9801 && operand_equal_p (TREE_OPERAND (arg0, 0),
9802 TREE_OPERAND (arg1, 0), 0)
9803 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9804 TYPE_UNSIGNED (rtype))
9805 /* Only create rotates in complete modes. Other cases are not
9806 expanded properly. */
9807 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9809 tree tree01, tree11;
9810 enum tree_code code01, code11;
9812 tree01 = TREE_OPERAND (arg0, 1);
9813 tree11 = TREE_OPERAND (arg1, 1);
9814 STRIP_NOPS (tree01);
9815 STRIP_NOPS (tree11);
9816 code01 = TREE_CODE (tree01);
9817 code11 = TREE_CODE (tree11);
9818 if (code01 == INTEGER_CST
9819 && code11 == INTEGER_CST
9820 && TREE_INT_CST_HIGH (tree01) == 0
9821 && TREE_INT_CST_HIGH (tree11) == 0
9822 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9823 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9825 tem = build2_loc (loc, LROTATE_EXPR,
9826 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9827 TREE_OPERAND (arg0, 0),
9828 code0 == LSHIFT_EXPR ? tree01 : tree11);
9829 return fold_convert_loc (loc, type, tem);
9831 else if (code11 == MINUS_EXPR)
9833 tree tree110, tree111;
9834 tree110 = TREE_OPERAND (tree11, 0);
9835 tree111 = TREE_OPERAND (tree11, 1);
9836 STRIP_NOPS (tree110);
9837 STRIP_NOPS (tree111);
9838 if (TREE_CODE (tree110) == INTEGER_CST
9839 && 0 == compare_tree_int (tree110,
9841 (TREE_TYPE (TREE_OPERAND
9843 && operand_equal_p (tree01, tree111, 0))
9845 fold_convert_loc (loc, type,
9846 build2 ((code0 == LSHIFT_EXPR
9849 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9850 TREE_OPERAND (arg0, 0), tree01));
9852 else if (code01 == MINUS_EXPR)
9854 tree tree010, tree011;
9855 tree010 = TREE_OPERAND (tree01, 0);
9856 tree011 = TREE_OPERAND (tree01, 1);
9857 STRIP_NOPS (tree010);
9858 STRIP_NOPS (tree011);
9859 if (TREE_CODE (tree010) == INTEGER_CST
9860 && 0 == compare_tree_int (tree010,
9862 (TREE_TYPE (TREE_OPERAND
9864 && operand_equal_p (tree11, tree011, 0))
9865 return fold_convert_loc
9867 build2 ((code0 != LSHIFT_EXPR
9870 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9871 TREE_OPERAND (arg0, 0), tree11));
9877 /* In most languages, we can't associate operations on floats through
9878 parentheses. Rather than remember where the parentheses were, we
9879 don't associate floats at all, unless the user has specified
9880 -fassociative-math.
9881 And, we need to make sure type is not saturating. */
9883 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9884 && !TYPE_SATURATING (type))
9886 tree var0, con0, lit0, minus_lit0;
9887 tree var1, con1, lit1, minus_lit1;
9890 /* Split both trees into variables, constants, and literals. Then
9891 associate each group together, the constants with literals,
9892 then the result with variables. This increases the chances of
9893 literals being recombined later and of generating relocatable
9894 expressions for the sum of a constant and literal. */
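/* For instance (illustrative only), "(x + 1) + (y + 2)" splits into
   variable parts x and y and literal parts 1 and 2; re-association
   produces "(x + y) + 3", combining the two literals into a single
   constant.  */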
9895 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9896 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9897 code == MINUS_EXPR);
9899 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9900 if (code == MINUS_EXPR)
9903 /* With undefined overflow we can only associate constants with one
9904 variable, and constants whose association doesn't overflow. */
9905 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9906 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9913 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9914 tmp0 = TREE_OPERAND (tmp0, 0);
9915 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9916 tmp1 = TREE_OPERAND (tmp1, 0);
9917 /* The only case we can still associate with two variables
9918 is if they are the same, modulo negation. */
9919 if (!operand_equal_p (tmp0, tmp1, 0))
9923 if (ok && lit0 && lit1)
9925 tree tmp0 = fold_convert (type, lit0);
9926 tree tmp1 = fold_convert (type, lit1);
9928 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
9929 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
9934 /* Only do something if we found more than two objects. Otherwise,
9935 nothing has changed and we risk infinite recursion. */
9937 && (2 < ((var0 != 0) + (var1 != 0)
9938 + (con0 != 0) + (con1 != 0)
9939 + (lit0 != 0) + (lit1 != 0)
9940 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9942 var0 = associate_trees (loc, var0, var1, code, type);
9943 con0 = associate_trees (loc, con0, con1, code, type);
9944 lit0 = associate_trees (loc, lit0, lit1, code, type);
9945 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
9947 /* Preserve the MINUS_EXPR if the negative part of the literal is
9948 greater than the positive part. Otherwise, the multiplicative
9949 folding code (i.e. extract_muldiv) may be fooled in case
9950 unsigned constants are subtracted, like in the following
9951 example: ((X*2 + 4) - 8U)/2. */
9952 if (minus_lit0 && lit0)
9954 if (TREE_CODE (lit0) == INTEGER_CST
9955 && TREE_CODE (minus_lit0) == INTEGER_CST
9956 && tree_int_cst_lt (lit0, minus_lit0))
9958 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9964 lit0 = associate_trees (loc, lit0, minus_lit0,
9973 fold_convert_loc (loc, type,
9974 associate_trees (loc, var0, minus_lit0,
9978 con0 = associate_trees (loc, con0, minus_lit0,
9981 fold_convert_loc (loc, type,
9982 associate_trees (loc, var0, con0,
9987 con0 = associate_trees (loc, con0, lit0, code, type);
9989 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9997 /* Pointer simplifications for subtraction, simple reassociations. */
9998 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10000 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10001 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10002 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10004 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10005 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10006 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10007 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10008 return fold_build2_loc (loc, PLUS_EXPR, type,
10009 fold_build2_loc (loc, MINUS_EXPR, type,
10011 fold_build2_loc (loc, MINUS_EXPR, type,
10014 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10015 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10017 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10018 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10019 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10020 fold_convert_loc (loc, type, arg1));
10022 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10025 /* A - (-B) -> A + B */
10026 if (TREE_CODE (arg1) == NEGATE_EXPR)
10027 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10028 fold_convert_loc (loc, type,
10029 TREE_OPERAND (arg1, 0)));
10030 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10031 if (TREE_CODE (arg0) == NEGATE_EXPR
10032 && (FLOAT_TYPE_P (type)
10033 || INTEGRAL_TYPE_P (type))
10034 && negate_expr_p (arg1)
10035 && reorder_operands_p (arg0, arg1))
10036 return fold_build2_loc (loc, MINUS_EXPR, type,
10037 fold_convert_loc (loc, type,
10038 negate_expr (arg1)),
10039 fold_convert_loc (loc, type,
10040 TREE_OPERAND (arg0, 0)));
10041 /* Convert -A - 1 to ~A. */
10042 if (INTEGRAL_TYPE_P (type)
10043 && TREE_CODE (arg0) == NEGATE_EXPR
10044 && integer_onep (arg1)
10045 && !TYPE_OVERFLOW_TRAPS (type))
10046 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10047 fold_convert_loc (loc, type,
10048 TREE_OPERAND (arg0, 0)));
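/* For example, in two's complement ~A == -A - 1, so with A == 5
the tree for -5 - 1 folds to ~5 (both evaluate to -6). */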
10050 /* Convert -1 - A to ~A. */
10051 if (INTEGRAL_TYPE_P (type)
10052 && integer_all_onesp (arg0))
10053 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10056 /* X - (X / CST) * CST is X % CST. */
10057 if (INTEGRAL_TYPE_P (type)
10058 && TREE_CODE (arg1) == MULT_EXPR
10059 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10060 && operand_equal_p (arg0,
10061 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10062 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10063 TREE_OPERAND (arg1, 1), 0))
10065 fold_convert_loc (loc, type,
10066 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10067 arg0, TREE_OPERAND (arg1, 1)));
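/* For example, with X == 17 and CST == 5: 17 - (17/5)*5 == 17 - 15 == 2,
which is exactly 17 % 5, matching the TRUNC_MOD_EXPR built here. */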
10069 if (! FLOAT_TYPE_P (type))
10071 if (integer_zerop (arg0))
10072 return negate_expr (fold_convert_loc (loc, type, arg1));
10073 if (integer_zerop (arg1))
10074 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10076 /* Fold A - (A & B) into ~B & A. */
10077 if (!TREE_SIDE_EFFECTS (arg0)
10078 && TREE_CODE (arg1) == BIT_AND_EXPR)
10080 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10082 tree arg10 = fold_convert_loc (loc, type,
10083 TREE_OPERAND (arg1, 0));
10084 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10085 fold_build1_loc (loc, BIT_NOT_EXPR,
10087 fold_convert_loc (loc, type, arg0));
10089 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10091 tree arg11 = fold_convert_loc (loc,
10092 type, TREE_OPERAND (arg1, 1));
10093 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10094 fold_build1_loc (loc, BIT_NOT_EXPR,
10096 fold_convert_loc (loc, type, arg0));
10100 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10101 any power of 2 minus 1. */
10102 if (TREE_CODE (arg0) == BIT_AND_EXPR
10103 && TREE_CODE (arg1) == BIT_AND_EXPR
10104 && operand_equal_p (TREE_OPERAND (arg0, 0),
10105 TREE_OPERAND (arg1, 0), 0))
10107 tree mask0 = TREE_OPERAND (arg0, 1);
10108 tree mask1 = TREE_OPERAND (arg1, 1);
10109 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10111 if (operand_equal_p (tem, mask1, 0))
10113 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10114 TREE_OPERAND (arg0, 0), mask1);
10115 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
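/* Worked example with A == 13 (0b1101) and B == 3 (0b0011, a power of 2
minus 1): (A & ~B) - (A & B) == 12 - 1 == 11, and (A ^ B) - B ==
14 - 3 == 11, so the two forms agree. */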
10120 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10121 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10122 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10124 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10125 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10126 (-ARG1 + ARG0) reduces to -ARG1. */
10127 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10128 return negate_expr (fold_convert_loc (loc, type, arg1));
10130 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10131 __complex__ ( x, -y ). This is not the same for SNaNs or if
10132 signed zeros are involved. */
10133 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10134 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10135 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10137 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10138 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10139 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10140 bool arg0rz = false, arg0iz = false;
10141 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10142 || (arg0i && (arg0iz = real_zerop (arg0i))))
10144 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10145 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10146 if (arg0rz && arg1i && real_zerop (arg1i))
10148 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10150 : build1 (REALPART_EXPR, rtype, arg1));
10151 tree ip = arg0i ? arg0i
10152 : build1 (IMAGPART_EXPR, rtype, arg0);
10153 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10155 else if (arg0iz && arg1r && real_zerop (arg1r))
10157 tree rp = arg0r ? arg0r
10158 : build1 (REALPART_EXPR, rtype, arg0);
10159 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10161 : build1 (IMAGPART_EXPR, rtype, arg1));
10162 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10167 /* Fold &x - &x. This can happen from &x.foo - &x.
10168 This is unsafe for certain floats even in non-IEEE formats.
10169 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10170 Also note that operand_equal_p is always false if an operand is volatile. */
10173 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10174 && operand_equal_p (arg0, arg1, 0))
10175 return build_zero_cst (type);
10177 /* A - B -> A + (-B) if B is easily negatable. */
10178 if (negate_expr_p (arg1)
10179 && ((FLOAT_TYPE_P (type)
10180 /* Avoid this transformation if B is a positive REAL_CST. */
10181 && (TREE_CODE (arg1) != REAL_CST
10182 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10183 || INTEGRAL_TYPE_P (type)))
10184 return fold_build2_loc (loc, PLUS_EXPR, type,
10185 fold_convert_loc (loc, type, arg0),
10186 fold_convert_loc (loc, type,
10187 negate_expr (arg1)));
10189 /* Try folding difference of addresses. */
10191 HOST_WIDE_INT diff;
10193 if ((TREE_CODE (arg0) == ADDR_EXPR
10194 || TREE_CODE (arg1) == ADDR_EXPR)
10195 && ptr_difference_const (arg0, arg1, &diff))
10196 return build_int_cst_type (type, diff);
10199 /* Fold &a[i] - &a[j] to i-j. */
10200 if (TREE_CODE (arg0) == ADDR_EXPR
10201 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10202 && TREE_CODE (arg1) == ADDR_EXPR
10203 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10205 tree aref0 = TREE_OPERAND (arg0, 0);
10206 tree aref1 = TREE_OPERAND (arg1, 0);
10207 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10208 TREE_OPERAND (aref1, 0), 0))
10210 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10211 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10212 tree esz = array_ref_element_size (aref0);
10213 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10214 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10215 fold_convert_loc (loc, type, esz));
10220 if (FLOAT_TYPE_P (type)
10221 && flag_unsafe_math_optimizations
10222 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10223 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10224 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10227 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10228 same or one. Make sure type is not saturating.
10229 fold_plusminus_mult_expr will re-associate. */
10230 if ((TREE_CODE (arg0) == MULT_EXPR
10231 || TREE_CODE (arg1) == MULT_EXPR)
10232 && !TYPE_SATURATING (type)
10233 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10235 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10243 /* (-A) * (-B) -> A * B */
10244 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10245 return fold_build2_loc (loc, MULT_EXPR, type,
10246 fold_convert_loc (loc, type,
10247 TREE_OPERAND (arg0, 0)),
10248 fold_convert_loc (loc, type,
10249 negate_expr (arg1)));
10250 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10251 return fold_build2_loc (loc, MULT_EXPR, type,
10252 fold_convert_loc (loc, type,
10253 negate_expr (arg0)),
10254 fold_convert_loc (loc, type,
10255 TREE_OPERAND (arg1, 0)));
10257 if (! FLOAT_TYPE_P (type))
10259 if (integer_zerop (arg1))
10260 return omit_one_operand_loc (loc, type, arg1, arg0);
10261 if (integer_onep (arg1))
10262 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10263 /* Transform x * -1 into -x. Make sure to do the negation
10264 on the original operand with conversions not stripped
10265 because we can only strip non-sign-changing conversions. */
10266 if (integer_all_onesp (arg1))
10267 return fold_convert_loc (loc, type, negate_expr (op0));
10268 /* Transform x * -C into -x * C if x is easily negatable. */
10269 if (TREE_CODE (arg1) == INTEGER_CST
10270 && tree_int_cst_sgn (arg1) == -1
10271 && negate_expr_p (arg0)
10272 && (tem = negate_expr (arg1)) != arg1
10273 && !TREE_OVERFLOW (tem))
10274 return fold_build2_loc (loc, MULT_EXPR, type,
10275 fold_convert_loc (loc, type,
10276 negate_expr (arg0)),
10279 /* (a * (1 << b)) is (a << b) */
10280 if (TREE_CODE (arg1) == LSHIFT_EXPR
10281 && integer_onep (TREE_OPERAND (arg1, 0)))
10282 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10283 TREE_OPERAND (arg1, 1));
10284 if (TREE_CODE (arg0) == LSHIFT_EXPR
10285 && integer_onep (TREE_OPERAND (arg0, 0)))
10286 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10287 TREE_OPERAND (arg0, 1));
10289 /* (A + A) * C -> A * 2 * C */
10290 if (TREE_CODE (arg0) == PLUS_EXPR
10291 && TREE_CODE (arg1) == INTEGER_CST
10292 && operand_equal_p (TREE_OPERAND (arg0, 0),
10293 TREE_OPERAND (arg0, 1), 0))
10294 return fold_build2_loc (loc, MULT_EXPR, type,
10295 omit_one_operand_loc (loc, type,
10296 TREE_OPERAND (arg0, 0),
10297 TREE_OPERAND (arg0, 1)),
10298 fold_build2_loc (loc, MULT_EXPR, type,
10299 build_int_cst (type, 2) , arg1));
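/* E.g. (a + a) * 3 becomes a * (2 * 3), i.e. a * 6, letting the constant
multiplication fold away. */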
10301 strict_overflow_p = false;
10302 if (TREE_CODE (arg1) == INTEGER_CST
10303 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10304 &strict_overflow_p)))
10306 if (strict_overflow_p)
10307 fold_overflow_warning (("assuming signed overflow does not "
10308 "occur when simplifying "
10310 WARN_STRICT_OVERFLOW_MISC);
10311 return fold_convert_loc (loc, type, tem);
10314 /* Optimize z * conj(z) for integer complex numbers. */
10315 if (TREE_CODE (arg0) == CONJ_EXPR
10316 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10317 return fold_mult_zconjz (loc, type, arg1);
10318 if (TREE_CODE (arg1) == CONJ_EXPR
10319 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10320 return fold_mult_zconjz (loc, type, arg0);
10324 /* Maybe fold x * 0 to 0. The expressions aren't the same
10325 when x is NaN, since x * 0 is also NaN. Nor are they the
10326 same in modes with signed zeros, since multiplying a
10327 negative value by 0 gives -0, not +0. */
10328 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10329 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10330 && real_zerop (arg1))
10331 return omit_one_operand_loc (loc, type, arg1, arg0);
10332 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10333 Likewise for complex arithmetic with signed zeros. */
10334 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10335 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10336 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10337 && real_onep (arg1))
10338 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10340 /* Transform x * -1.0 into -x. */
10341 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10342 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10343 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10344 && real_minus_onep (arg1))
10345 return fold_convert_loc (loc, type, negate_expr (arg0));
10347 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10348 the result for floating-point types due to rounding, so it is applied
10349 only if -fassociative-math was specified. */
10350 if (flag_associative_math
10351 && TREE_CODE (arg0) == RDIV_EXPR
10352 && TREE_CODE (arg1) == REAL_CST
10353 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10355 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10358 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10359 TREE_OPERAND (arg0, 1));
10362 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10363 if (operand_equal_p (arg0, arg1, 0))
10365 tree tem = fold_strip_sign_ops (arg0);
10366 if (tem != NULL_TREE)
10368 tem = fold_convert_loc (loc, type, tem);
10369 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10373 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10374 This is not the same for NaNs or if signed zeros are involved. */
10376 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10377 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10378 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10379 && TREE_CODE (arg1) == COMPLEX_CST
10380 && real_zerop (TREE_REALPART (arg1)))
10382 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10383 if (real_onep (TREE_IMAGPART (arg1)))
10385 fold_build2_loc (loc, COMPLEX_EXPR, type,
10386 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10388 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10389 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10391 fold_build2_loc (loc, COMPLEX_EXPR, type,
10392 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10393 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10397 /* Optimize z * conj(z) for floating point complex numbers.
10398 Guarded by flag_unsafe_math_optimizations as non-finite
10399 imaginary components don't produce scalar results. */
10400 if (flag_unsafe_math_optimizations
10401 && TREE_CODE (arg0) == CONJ_EXPR
10402 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10403 return fold_mult_zconjz (loc, type, arg1);
10404 if (flag_unsafe_math_optimizations
10405 && TREE_CODE (arg1) == CONJ_EXPR
10406 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10407 return fold_mult_zconjz (loc, type, arg0);
10409 if (flag_unsafe_math_optimizations)
10411 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10412 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10414 /* Optimizations of root(...)*root(...). */
10415 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10418 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10419 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10421 /* Optimize sqrt(x)*sqrt(x) as x. */
10422 if (BUILTIN_SQRT_P (fcode0)
10423 && operand_equal_p (arg00, arg10, 0)
10424 && ! HONOR_SNANS (TYPE_MODE (type)))
10427 /* Optimize root(x)*root(y) as root(x*y). */
10428 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10429 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10430 return build_call_expr_loc (loc, rootfn, 1, arg);
10433 /* Optimize expN(x)*expN(y) as expN(x+y). */
10434 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10436 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10437 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10438 CALL_EXPR_ARG (arg0, 0),
10439 CALL_EXPR_ARG (arg1, 0));
10440 return build_call_expr_loc (loc, expfn, 1, arg);
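/* E.g. exp (a) * exp (b) becomes exp (a + b), collapsing two calls and a
multiply into one call and an addition; valid only under
-funsafe-math-optimizations since rounding may differ. */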
10443 /* Optimizations of pow(...)*pow(...). */
10444 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10445 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10446 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10448 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10449 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10450 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10451 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10453 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10454 if (operand_equal_p (arg01, arg11, 0))
10456 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10457 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10459 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10462 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10463 if (operand_equal_p (arg00, arg10, 0))
10465 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10466 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10468 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10472 /* Optimize tan(x)*cos(x) as sin(x). */
10473 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10474 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10475 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10476 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10477 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10478 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10479 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10480 CALL_EXPR_ARG (arg1, 0), 0))
10482 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10484 if (sinfn != NULL_TREE)
10485 return build_call_expr_loc (loc, sinfn, 1,
10486 CALL_EXPR_ARG (arg0, 0));
10489 /* Optimize x*pow(x,c) as pow(x,c+1). */
10490 if (fcode1 == BUILT_IN_POW
10491 || fcode1 == BUILT_IN_POWF
10492 || fcode1 == BUILT_IN_POWL)
10494 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10495 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10496 if (TREE_CODE (arg11) == REAL_CST
10497 && !TREE_OVERFLOW (arg11)
10498 && operand_equal_p (arg0, arg10, 0))
10500 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10504 c = TREE_REAL_CST (arg11);
10505 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10506 arg = build_real (type, c);
10507 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
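/* E.g. x * pow (x, 2.0) becomes pow (x, 3.0). */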
10511 /* Optimize pow(x,c)*x as pow(x,c+1). */
10512 if (fcode0 == BUILT_IN_POW
10513 || fcode0 == BUILT_IN_POWF
10514 || fcode0 == BUILT_IN_POWL)
10516 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10517 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10518 if (TREE_CODE (arg01) == REAL_CST
10519 && !TREE_OVERFLOW (arg01)
10520 && operand_equal_p (arg1, arg00, 0))
10522 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10526 c = TREE_REAL_CST (arg01);
10527 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10528 arg = build_real (type, c);
10529 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10533 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10534 if (optimize_function_for_speed_p (cfun)
10535 && operand_equal_p (arg0, arg1, 0))
10537 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10541 tree arg = build_real (type, dconst2);
10542 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10551 if (integer_all_onesp (arg1))
10552 return omit_one_operand_loc (loc, type, arg1, arg0);
10553 if (integer_zerop (arg1))
10554 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10555 if (operand_equal_p (arg0, arg1, 0))
10556 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10558 /* ~X | X is -1. */
10559 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10560 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10562 t1 = build_zero_cst (type);
10563 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10564 return omit_one_operand_loc (loc, type, t1, arg1);
10567 /* X | ~X is -1. */
10568 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10569 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10571 t1 = build_zero_cst (type);
10572 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10573 return omit_one_operand_loc (loc, type, t1, arg0);
10576 /* Canonicalize (X & C1) | C2. */
10577 if (TREE_CODE (arg0) == BIT_AND_EXPR
10578 && TREE_CODE (arg1) == INTEGER_CST
10579 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10581 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10582 int width = TYPE_PRECISION (type), w;
10583 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10584 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10585 hi2 = TREE_INT_CST_HIGH (arg1);
10586 lo2 = TREE_INT_CST_LOW (arg1);
10588 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10589 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10590 return omit_one_operand_loc (loc, type, arg1,
10591 TREE_OPERAND (arg0, 0));
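/* E.g. with C1 == 0x0f and C2 == 0xff, every bit of X & 0x0f is already
set in 0xff, so (X & 0x0f) | 0xff is simply 0xff; X is kept only for
its side effects. */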
10593 if (width > HOST_BITS_PER_WIDE_INT)
10595 mhi = (unsigned HOST_WIDE_INT) -1
10596 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10602 mlo = (unsigned HOST_WIDE_INT) -1
10603 >> (HOST_BITS_PER_WIDE_INT - width);
10606 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10607 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10608 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10609 TREE_OPERAND (arg0, 0), arg1);
10611 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10612 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10613 mode which allows further optimizations. */
10620 for (w = BITS_PER_UNIT;
10621 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10624 unsigned HOST_WIDE_INT mask
10625 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10626 if (((lo1 | lo2) & mask) == mask
10627 && (lo1 & ~mask) == 0 && hi1 == 0)
10634 if (hi3 != hi1 || lo3 != lo1)
10635 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10636 fold_build2_loc (loc, BIT_AND_EXPR, type,
10637 TREE_OPERAND (arg0, 0),
10638 build_int_cst_wide (type,
10643 /* (X & Y) | Y is (X, Y). */
10644 if (TREE_CODE (arg0) == BIT_AND_EXPR
10645 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10646 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10647 /* (X & Y) | X is (Y, X). */
10648 if (TREE_CODE (arg0) == BIT_AND_EXPR
10649 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10650 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10651 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10652 /* X | (X & Y) is (Y, X). */
10653 if (TREE_CODE (arg1) == BIT_AND_EXPR
10654 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10655 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10656 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10657 /* X | (Y & X) is (Y, X). */
10658 if (TREE_CODE (arg1) == BIT_AND_EXPR
10659 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10660 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10661 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10663 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10664 if (t1 != NULL_TREE)
10667 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10669 This results in more efficient code for machines without a NAND
10670 instruction. Combine will canonicalize to the first form
10671 which will allow use of NAND instructions provided by the
10672 backend if they exist. */
10673 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10674 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10677 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10678 build2 (BIT_AND_EXPR, type,
10679 fold_convert_loc (loc, type,
10680 TREE_OPERAND (arg0, 0)),
10681 fold_convert_loc (loc, type,
10682 TREE_OPERAND (arg1, 0))));
10685 /* See if this can be simplified into a rotate first. If that
10686 is unsuccessful, continue in the association code. */
10690 if (integer_zerop (arg1))
10691 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10692 if (integer_all_onesp (arg1))
10693 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10694 if (operand_equal_p (arg0, arg1, 0))
10695 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10697 /* ~X ^ X is -1. */
10698 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10699 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10701 t1 = build_zero_cst (type);
10702 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10703 return omit_one_operand_loc (loc, type, t1, arg1);
10706 /* X ^ ~X is -1. */
10707 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10708 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10710 t1 = build_zero_cst (type);
10711 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10712 return omit_one_operand_loc (loc, type, t1, arg0);
10715 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10716 with a constant, and the two constants have no bits in common,
10717 we should treat this as a BIT_IOR_EXPR since this may produce more
10718 simplifications. */
10719 if (TREE_CODE (arg0) == BIT_AND_EXPR
10720 && TREE_CODE (arg1) == BIT_AND_EXPR
10721 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10722 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10723 && integer_zerop (const_binop (BIT_AND_EXPR,
10724 TREE_OPERAND (arg0, 1),
10725 TREE_OPERAND (arg1, 1))))
10727 code = BIT_IOR_EXPR;
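/* E.g. (x & 0x0f) ^ (y & 0xf0): the two masks share no bits, so no bit
can be set in both operands and the XOR is equivalent to an IOR. */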
10731 /* (X | Y) ^ X -> Y & ~X. */
10732 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10733 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10735 tree t2 = TREE_OPERAND (arg0, 1);
10736 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10738 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10739 fold_convert_loc (loc, type, t2),
10740 fold_convert_loc (loc, type, t1));
10744 /* (Y | X) ^ X -> Y & ~X. */
10745 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10746 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10748 tree t2 = TREE_OPERAND (arg0, 0);
10749 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10751 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10752 fold_convert_loc (loc, type, t2),
10753 fold_convert_loc (loc, type, t1));
10757 /* X ^ (X | Y) -> Y & ~X. */
10758 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10759 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10761 tree t2 = TREE_OPERAND (arg1, 1);
10762 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10764 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10765 fold_convert_loc (loc, type, t2),
10766 fold_convert_loc (loc, type, t1));
10770 /* X ^ (Y | X) -> Y & ~X. */
10771 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10772 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10774 tree t2 = TREE_OPERAND (arg1, 0);
10775 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10777 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10778 fold_convert_loc (loc, type, t2),
10779 fold_convert_loc (loc, type, t1));
10783 /* Convert ~X ^ ~Y to X ^ Y. */
10784 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10785 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10786 return fold_build2_loc (loc, code, type,
10787 fold_convert_loc (loc, type,
10788 TREE_OPERAND (arg0, 0)),
10789 fold_convert_loc (loc, type,
10790 TREE_OPERAND (arg1, 0)));
10792 /* Convert ~X ^ C to X ^ ~C. */
10793 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10794 && TREE_CODE (arg1) == INTEGER_CST)
10795 return fold_build2_loc (loc, code, type,
10796 fold_convert_loc (loc, type,
10797 TREE_OPERAND (arg0, 0)),
10798 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10800 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10801 if (TREE_CODE (arg0) == BIT_AND_EXPR
10802 && integer_onep (TREE_OPERAND (arg0, 1))
10803 && integer_onep (arg1))
10804 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10805 build_int_cst (TREE_TYPE (arg0), 0));
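/* Both forms test the low bit: for even X, (X & 1) ^ 1 == 1 and
(X & 1) == 0 is true; for odd X both yield 0/false. */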
10807 /* Fold (X & Y) ^ Y as ~X & Y. */
10808 if (TREE_CODE (arg0) == BIT_AND_EXPR
10809 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10811 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10812 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10813 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10814 fold_convert_loc (loc, type, arg1));
10816 /* Fold (X & Y) ^ X as ~Y & X. */
10817 if (TREE_CODE (arg0) == BIT_AND_EXPR
10818 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10819 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10821 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10822 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10823 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10824 fold_convert_loc (loc, type, arg1));
10826 /* Fold X ^ (X & Y) as X & ~Y. */
10827 if (TREE_CODE (arg1) == BIT_AND_EXPR
10828 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10830 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10831 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10832 fold_convert_loc (loc, type, arg0),
10833 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10835 /* Fold X ^ (Y & X) as ~Y & X. */
10836 if (TREE_CODE (arg1) == BIT_AND_EXPR
10837 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10838 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10840 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10841 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10842 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10843 fold_convert_loc (loc, type, arg0));
10846 /* See if this can be simplified into a rotate first. If that
10847 is unsuccessful, continue in the association code. */
10851 if (integer_all_onesp (arg1))
10852 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10853 if (integer_zerop (arg1))
10854 return omit_one_operand_loc (loc, type, arg1, arg0);
10855 if (operand_equal_p (arg0, arg1, 0))
10856 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10858 /* ~X & X is always zero. */
10859 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10860 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10861 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10863 /* X & ~X is always zero. */
10864 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10865 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10866 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10868 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10869 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10870 && TREE_CODE (arg1) == INTEGER_CST
10871 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10873 tree tmp1 = fold_convert_loc (loc, type, arg1);
10874 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10875 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10876 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10877 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10879 fold_convert_loc (loc, type,
10880 fold_build2_loc (loc, BIT_IOR_EXPR,
10881 type, tmp2, tmp3));
10884 /* (X | Y) & Y is (X, Y). */
10885 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10886 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10887 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10888 /* (X | Y) & X is (Y, X). */
10889 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10890 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10891 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10892 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10893 /* X & (X | Y) is (Y, X). */
10894 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10895 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10896 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10897 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10898 /* X & (Y | X) is (Y, X). */
10899 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10900 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10901 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10902 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10904 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10905 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10906 && integer_onep (TREE_OPERAND (arg0, 1))
10907 && integer_onep (arg1))
10909 tem = TREE_OPERAND (arg0, 0);
10910 return fold_build2_loc (loc, EQ_EXPR, type,
10911 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10912 build_int_cst (TREE_TYPE (tem), 1)),
10913 build_int_cst (TREE_TYPE (tem), 0));
10915 /* Fold ~X & 1 as (X & 1) == 0. */
10916 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10917 && integer_onep (arg1))
10919 tem = TREE_OPERAND (arg0, 0);
10920 return fold_build2_loc (loc, EQ_EXPR, type,
10921 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10922 build_int_cst (TREE_TYPE (tem), 1)),
10923 build_int_cst (TREE_TYPE (tem), 0));
10926 /* Fold (X ^ Y) & Y as ~X & Y. */
10927 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10928 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10930 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10931 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10932 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10933 fold_convert_loc (loc, type, arg1));
10935 /* Fold (X ^ Y) & X as ~Y & X. */
10936 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10937 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10938 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10940 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10941 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10942 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10943 fold_convert_loc (loc, type, arg1));
10945 /* Fold X & (X ^ Y) as X & ~Y. */
10946 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10947 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10949 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10950 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10951 fold_convert_loc (loc, type, arg0),
10952 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10954 /* Fold X & (Y ^ X) as ~Y & X. */
10955 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10956 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10957 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10959 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10960 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10961 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10962 fold_convert_loc (loc, type, arg0));
10965 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10966 ((A & N) + B) & M -> (A + B) & M
10967 Similarly if (N & M) == 0,
10968 ((A | N) + B) & M -> (A + B) & M
10969 and for - instead of + (or unary - instead of +)
10970 and/or ^ instead of |.
10971 If B is constant and (B & M) == 0, fold into A & M. */
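/* E.g. with M == 0xff and N == 0xffff: A & 0xffff preserves all of A's
low eight bits, and carries only propagate upward, so the low eight
bits of the sum -- and hence the result of the final & M -- are
unchanged when the inner mask is dropped. */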
10972 if (host_integerp (arg1, 1))
10974 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
10975 if (~cst1 && (cst1 & (cst1 + 1)) == 0
10976 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10977 && (TREE_CODE (arg0) == PLUS_EXPR
10978 || TREE_CODE (arg0) == MINUS_EXPR
10979 || TREE_CODE (arg0) == NEGATE_EXPR)
10980 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10981 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10985 unsigned HOST_WIDE_INT cst0;
10987 /* Now we know that arg0 is (C + D) or (C - D) or
10988 -C, and that arg1 (M) == (1LL << cst) - 1.
10989 Store C into PMOP[0] and D into PMOP[1]. */
10990 pmop[0] = TREE_OPERAND (arg0, 0);
10992 if (TREE_CODE (arg0) != NEGATE_EXPR)
10994 pmop[1] = TREE_OPERAND (arg0, 1);
10998 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
10999 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11003 for (; which >= 0; which--)
11004 switch (TREE_CODE (pmop[which]))
11009 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11012 /* tree_low_cst is not used because we don't care about the upper bits. */
11014 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11016 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11021 else if (cst0 != 0)
11023 /* If C or D is of the form (A & N) where
11024 (N & M) == M, or of the form (A | N) or
11025 (A ^ N) where (N & M) == 0, replace it with A. */
11026 pmop[which] = TREE_OPERAND (pmop[which], 0);
11029 /* If C or D is an N where (N & M) == 0, it can be
11030 omitted (assumed 0). */
11031 if ((TREE_CODE (arg0) == PLUS_EXPR
11032 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11033 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11034 pmop[which] = NULL;
11040 /* Only build anything new if we optimized one or both arguments above. */
11042 if (pmop[0] != TREE_OPERAND (arg0, 0)
11043 || (TREE_CODE (arg0) != NEGATE_EXPR
11044 && pmop[1] != TREE_OPERAND (arg0, 1)))
11046 tree utype = TREE_TYPE (arg0);
11047 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11049 /* Perform the operations in a type that has defined
11050 overflow behavior. */
11051 utype = unsigned_type_for (TREE_TYPE (arg0));
11052 if (pmop[0] != NULL)
11053 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11054 if (pmop[1] != NULL)
11055 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11058 if (TREE_CODE (arg0) == NEGATE_EXPR)
11059 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11060 else if (TREE_CODE (arg0) == PLUS_EXPR)
11062 if (pmop[0] != NULL && pmop[1] != NULL)
11063 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11065 else if (pmop[0] != NULL)
11067 else if (pmop[1] != NULL)
11070 return build_int_cst (type, 0);
11072 else if (pmop[0] == NULL)
11073 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11075 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11077 /* TEM is now the new binary +, - or unary - replacement. */
11078 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11079 fold_convert_loc (loc, utype, arg1));
11080 return fold_convert_loc (loc, type, tem);
11085 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11086 if (t1 != NULL_TREE)
11088 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11089 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11090 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11093 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11095 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11096 && (~TREE_INT_CST_LOW (arg1)
11097 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11099 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11102 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11104 This results in more efficient code for machines without a NOR
11105 instruction. Combine will canonicalize to the first form
11106 which will allow use of NOR instructions provided by the
11107 backend if they exist. */
11108 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11109 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11111 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11112 build2 (BIT_IOR_EXPR, type,
11113 fold_convert_loc (loc, type,
11114 TREE_OPERAND (arg0, 0)),
11115 fold_convert_loc (loc, type,
11116 TREE_OPERAND (arg1, 0))));
11119 /* If arg0 is derived from the address of an object or function, we may
11120 be able to fold this expression using the object or function's alignment. */
11122 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11124 unsigned HOST_WIDE_INT modulus, residue;
11125 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11127 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11128 integer_onep (arg1));
11130 /* This works because modulus is a power of 2. If this weren't the
11131 case, we'd have to replace it by its greatest power-of-2
11132 divisor: modulus & -modulus. */
11134 return build_int_cst (type, residue & low);
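/* E.g. if ARG0 is known to be 8-byte aligned (modulus 8, residue 0),
then ARG0 & 7 folds to the constant 0. */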
11137 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11138 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11139 if the new mask might be further optimized. */
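/* E.g. (X << 4) & 0xf1: the low four bits of X << 4 are already zero,
so the mask can be widened to 0xf1 | 0xf == 0xff, which is a byte-mode
mask and may later be recognized as a simple zero extension. */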
11140 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11141 || TREE_CODE (arg0) == RSHIFT_EXPR)
11142 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11143 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11144 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11145 < TYPE_PRECISION (TREE_TYPE (arg0))
11146 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11147 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11149 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11150 unsigned HOST_WIDE_INT mask
11151 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11152 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11153 tree shift_type = TREE_TYPE (arg0);
11155 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11156 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11157 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11158 && TYPE_PRECISION (TREE_TYPE (arg0))
11159 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11161 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11162 tree arg00 = TREE_OPERAND (arg0, 0);
11163 /* See if more bits can be proven to be zero because of zero extension. */
11165 if (TREE_CODE (arg00) == NOP_EXPR
11166 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11168 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11169 if (TYPE_PRECISION (inner_type)
11170 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11171 && TYPE_PRECISION (inner_type) < prec)
11173 prec = TYPE_PRECISION (inner_type);
11174 /* See if we can shorten the right shift. */
11176 shift_type = inner_type;
11179 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11180 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11181 zerobits <<= prec - shiftc;
11182 /* For an arithmetic shift, if the sign bit could be set, zerobits
11183 can actually contain sign bits, so no transformation is
11184 possible, unless MASK masks them all away. In that
11185 case the shift needs to be converted into a logical shift. */
11186 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11187 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11189 if ((mask & zerobits) == 0)
11190 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11196 /* ((X << 16) & 0xff00) is (X, 0). */
11197 if ((mask & zerobits) == mask)
11198 return omit_one_operand_loc (loc, type,
11199 build_int_cst (type, 0), arg0);
11201 newmask = mask | zerobits;
11202 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11206 /* Only do the transformation if NEWMASK is some integer mode's mask. */
11208 for (prec = BITS_PER_UNIT;
11209 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11210 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11212 if (prec < HOST_BITS_PER_WIDE_INT
11213 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11217 if (shift_type != TREE_TYPE (arg0))
11219 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11220 fold_convert_loc (loc, shift_type,
11221 TREE_OPERAND (arg0, 0)),
11222 TREE_OPERAND (arg0, 1));
11223 tem = fold_convert_loc (loc, type, tem);
11227 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11228 if (!tree_int_cst_equal (newmaskt, arg1))
11229 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11237 /* Don't touch a floating-point divide by zero unless the mode
11238 of the constant can represent infinity. */
11239 if (TREE_CODE (arg1) == REAL_CST
11240 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11241 && real_zerop (arg1))
11244 /* Optimize A / A to 1.0 if we don't care about
11245 NaNs or Infinities. Skip the transformation
11246 for non-real operands. */
11247 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11248 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11249 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11250 && operand_equal_p (arg0, arg1, 0))
11252 tree r = build_real (TREE_TYPE (arg0), dconst1);
11254 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11257 /* The complex version of the above A / A optimization. */
11258 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11259 && operand_equal_p (arg0, arg1, 0))
11261 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11262 if (! HONOR_NANS (TYPE_MODE (elem_type))
11263 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11265 tree r = build_real (elem_type, dconst1);
11266 /* omit_two_operands will call fold_convert for us. */
11267 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11271 /* (-A) / (-B) -> A / B */
11272 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11273 return fold_build2_loc (loc, RDIV_EXPR, type,
11274 TREE_OPERAND (arg0, 0),
11275 negate_expr (arg1));
11276 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11277 return fold_build2_loc (loc, RDIV_EXPR, type,
11278 negate_expr (arg0),
11279 TREE_OPERAND (arg1, 0));
11281 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11282 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11283 && real_onep (arg1))
11284 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11286 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11287 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11288 && real_minus_onep (arg1))
11289 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11290 negate_expr (arg0)));
11292 /* If ARG1 is a constant, we can convert this to a multiply by the
11293 reciprocal. This does not have the same rounding properties,
11294 so only do this if -freciprocal-math. We can actually
11295 always safely do it if ARG1 is a power of two, but it's hard to
11296 tell if it is or not in a portable manner. */
11297 if (TREE_CODE (arg1) == REAL_CST)
11299 if (flag_reciprocal_math
11300 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11302 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11303 /* Find the reciprocal if optimizing and the result is exact. */
11307 r = TREE_REAL_CST (arg1);
11308 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11310 tem = build_real (type, r);
11311 return fold_build2_loc (loc, MULT_EXPR, type,
11312 fold_convert_loc (loc, type, arg0), tem);
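/* E.g. x / 4.0 becomes x * 0.25; the inverse of a power of two is exact,
so this particular case loses no precision. */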
11316 /* Convert A/B/C to A/(B*C). */
11317 if (flag_reciprocal_math
11318 && TREE_CODE (arg0) == RDIV_EXPR)
11319 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11320 fold_build2_loc (loc, MULT_EXPR, type,
11321 TREE_OPERAND (arg0, 1), arg1));
11323 /* Convert A/(B/C) to (A/B)*C. */
11324 if (flag_reciprocal_math
11325 && TREE_CODE (arg1) == RDIV_EXPR)
11326 return fold_build2_loc (loc, MULT_EXPR, type,
11327 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11328 TREE_OPERAND (arg1, 0)),
11329 TREE_OPERAND (arg1, 1));
11331 /* Convert C1/(X*C2) into (C1/C2)/X. */
11332 if (flag_reciprocal_math
11333 && TREE_CODE (arg1) == MULT_EXPR
11334 && TREE_CODE (arg0) == REAL_CST
11335 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11337 tree tem = const_binop (RDIV_EXPR, arg0,
11338 TREE_OPERAND (arg1, 1));
11340 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11341 TREE_OPERAND (arg1, 0));
11344 if (flag_unsafe_math_optimizations)
11346 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11347 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11349 /* Optimize sin(x)/cos(x) as tan(x). */
11350 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11351 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11352 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11353 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11354 CALL_EXPR_ARG (arg1, 0), 0))
11356 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11358 if (tanfn != NULL_TREE)
11359 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11362 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11363 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11364 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11365 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11366 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11367 CALL_EXPR_ARG (arg1, 0), 0))
11369 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11371 if (tanfn != NULL_TREE)
11373 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11374 CALL_EXPR_ARG (arg0, 0));
11375 return fold_build2_loc (loc, RDIV_EXPR, type,
11376 build_real (type, dconst1), tmp);
11380 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11381 NaNs or Infinities. */
11382 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11383 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11384 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11386 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11387 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11389 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11390 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11391 && operand_equal_p (arg00, arg01, 0))
11393 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11395 if (cosfn != NULL_TREE)
11396 return build_call_expr_loc (loc, cosfn, 1, arg00);
11400 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11401 NaNs or Infinities. */
11402 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11403 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11404 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11406 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11407 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11409 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11410 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11411 && operand_equal_p (arg00, arg01, 0))
11413 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11415 if (cosfn != NULL_TREE)
11417 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11418 return fold_build2_loc (loc, RDIV_EXPR, type,
11419 build_real (type, dconst1),
11425 /* Optimize pow(x,c)/x as pow(x,c-1). */
11426 if (fcode0 == BUILT_IN_POW
11427 || fcode0 == BUILT_IN_POWF
11428 || fcode0 == BUILT_IN_POWL)
11430 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11431 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11432 if (TREE_CODE (arg01) == REAL_CST
11433 && !TREE_OVERFLOW (arg01)
11434 && operand_equal_p (arg1, arg00, 0))
11436 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11440 c = TREE_REAL_CST (arg01);
11441 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11442 arg = build_real (type, c);
11443 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11447 /* Optimize a/root(b/c) into a*root(c/b). */
11448 if (BUILTIN_ROOT_P (fcode1))
11450 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11452 if (TREE_CODE (rootarg) == RDIV_EXPR)
11454 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11455 tree b = TREE_OPERAND (rootarg, 0);
11456 tree c = TREE_OPERAND (rootarg, 1);
11458 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11460 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11461 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11465 /* Optimize x/expN(y) into x*expN(-y). */
11466 if (BUILTIN_EXPONENT_P (fcode1))
11468 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11469 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11470 arg1 = build_call_expr_loc (loc,
11472 fold_convert_loc (loc, type, arg));
11473 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11476 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11477 if (fcode1 == BUILT_IN_POW
11478 || fcode1 == BUILT_IN_POWF
11479 || fcode1 == BUILT_IN_POWL)
11481 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11482 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11483 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11484 tree neg11 = fold_convert_loc (loc, type,
11485 negate_expr (arg11));
11486 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11487 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11492 case TRUNC_DIV_EXPR:
11493 /* Optimize (X & (-A)) / A where A is a power of 2, to X >> log2(A). */
11495 if (TREE_CODE (arg0) == BIT_AND_EXPR
11496 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11497 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11499 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11500 arg1, TREE_OPERAND (arg0, 1));
11501 if (sum && integer_zerop (sum)) {
11502 unsigned long pow2;
11504 if (TREE_INT_CST_LOW (arg1))
11505 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11507 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11508 + HOST_BITS_PER_WIDE_INT;
11510 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11511 TREE_OPERAND (arg0, 0),
11512 build_int_cst (NULL_TREE, pow2));
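/* E.g. with A == 8: X & -8 is always a multiple of 8, and an exact
division by 8 equals an arithmetic shift right by 3, even for negative
values such as -24 (-24 / 8 == -24 >> 3 == -3). */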
11518 case FLOOR_DIV_EXPR:
11519 /* Simplify A / (B << N) where A and B are positive and B is
11520 a power of 2, to A >> (N + log2(B)). */
11521 strict_overflow_p = false;
11522 if (TREE_CODE (arg1) == LSHIFT_EXPR
11523 && (TYPE_UNSIGNED (type)
11524 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11526 tree sval = TREE_OPERAND (arg1, 0);
11527 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11529 tree sh_cnt = TREE_OPERAND (arg1, 1);
11530 unsigned long pow2;
11532 if (TREE_INT_CST_LOW (sval))
11533 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11535 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11536 + HOST_BITS_PER_WIDE_INT;
11538 if (strict_overflow_p)
11539 fold_overflow_warning (("assuming signed overflow does not "
11540 "occur when simplifying A / (B << N)"),
11541 WARN_STRICT_OVERFLOW_MISC);
11543 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11544 sh_cnt, build_int_cst (NULL_TREE, pow2));
11545 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11546 fold_convert_loc (loc, type, arg0), sh_cnt);
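/* E.g. for unsigned A, A / (4 << n) becomes A >> (n + 2), since
4 << n is 2**(n+2) and unsigned division by a power of two is a
right shift. */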
11550 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11551 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11552 if (INTEGRAL_TYPE_P (type)
11553 && TYPE_UNSIGNED (type)
11554 && code == FLOOR_DIV_EXPR)
11555 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11559 case ROUND_DIV_EXPR:
11560 case CEIL_DIV_EXPR:
11561 case EXACT_DIV_EXPR:
11562 if (integer_onep (arg1))
11563 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11564 if (integer_zerop (arg1))
11566 /* X / -1 is -X. */
11567 if (!TYPE_UNSIGNED (type)
11568 && TREE_CODE (arg1) == INTEGER_CST
11569 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11570 && TREE_INT_CST_HIGH (arg1) == -1)
11571 return fold_convert_loc (loc, type, negate_expr (arg0));
11573 /* Convert -A / -B to A / B when the type is signed and overflow is undefined. */
11575 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11576 && TREE_CODE (arg0) == NEGATE_EXPR
11577 && negate_expr_p (arg1))
11579 if (INTEGRAL_TYPE_P (type))
11580 fold_overflow_warning (("assuming signed overflow does not occur "
11581 "when distributing negation across "
11583 WARN_STRICT_OVERFLOW_MISC);
11584 return fold_build2_loc (loc, code, type,
11585 fold_convert_loc (loc, type,
11586 TREE_OPERAND (arg0, 0)),
11587 fold_convert_loc (loc, type,
11588 negate_expr (arg1)));
11590 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11591 && TREE_CODE (arg1) == NEGATE_EXPR
11592 && negate_expr_p (arg0))
11594 if (INTEGRAL_TYPE_P (type))
11595 fold_overflow_warning (("assuming signed overflow does not occur "
11596 "when distributing negation across "
11598 WARN_STRICT_OVERFLOW_MISC);
11599 return fold_build2_loc (loc, code, type,
11600 fold_convert_loc (loc, type,
11601 negate_expr (arg0)),
11602 fold_convert_loc (loc, type,
11603 TREE_OPERAND (arg1, 0)));
11606 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11607 operation, EXACT_DIV_EXPR.
11609 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11610 At one time others generated faster code; it's not clear whether they do
11611 after the last round of changes to the DIV code in expmed.c. */
11612 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11613 && multiple_of_p (type, arg0, arg1))
11614 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11616 strict_overflow_p = false;
11617 if (TREE_CODE (arg1) == INTEGER_CST
11618 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11619 &strict_overflow_p)))
11621 if (strict_overflow_p)
11622 fold_overflow_warning (("assuming signed overflow does not occur "
11623 "when simplifying division"),
11624 WARN_STRICT_OVERFLOW_MISC);
11625 return fold_convert_loc (loc, type, tem);
11630 case CEIL_MOD_EXPR:
11631 case FLOOR_MOD_EXPR:
11632 case ROUND_MOD_EXPR:
11633 case TRUNC_MOD_EXPR:
11634 /* X % 1 is always zero, but be sure to preserve any side effects in X. */
11636 if (integer_onep (arg1))
11637 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11639 /* X % 0, return X % 0 unchanged so that we can get the
11640 proper warnings and errors. */
11641 if (integer_zerop (arg1))
11644 /* 0 % X is always zero, but be sure to preserve any side
11645 effects in X. Place this after checking for X == 0. */
11646 if (integer_zerop (arg0))
11647 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11649 /* X % -1 is zero. */
11650 if (!TYPE_UNSIGNED (type)
11651 && TREE_CODE (arg1) == INTEGER_CST
11652 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11653 && TREE_INT_CST_HIGH (arg1) == -1)
11654 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11656 /* X % -C is the same as X % C. */
11657 if (code == TRUNC_MOD_EXPR
11658 && !TYPE_UNSIGNED (type)
11659 && TREE_CODE (arg1) == INTEGER_CST
11660 && !TREE_OVERFLOW (arg1)
11661 && TREE_INT_CST_HIGH (arg1) < 0
11662 && !TYPE_OVERFLOW_TRAPS (type)
11663 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11664 && !sign_bit_p (arg1, arg1))
11665 return fold_build2_loc (loc, code, type,
11666 fold_convert_loc (loc, type, arg0),
11667 fold_convert_loc (loc, type,
11668 negate_expr (arg1)));
11670 /* X % -Y is the same as X % Y. */
11671 if (code == TRUNC_MOD_EXPR
11672 && !TYPE_UNSIGNED (type)
11673 && TREE_CODE (arg1) == NEGATE_EXPR
11674 && !TYPE_OVERFLOW_TRAPS (type))
11675 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11676 fold_convert_loc (loc, type,
11677 TREE_OPERAND (arg1, 0)));
11679 strict_overflow_p = false;
11680 if (TREE_CODE (arg1) == INTEGER_CST
11681 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11682 &strict_overflow_p)))
11684 if (strict_overflow_p)
11685 fold_overflow_warning (("assuming signed overflow does not occur "
11686 "when simplifying modulus"),
11687 WARN_STRICT_OVERFLOW_MISC);
11688 return fold_convert_loc (loc, type, tem);
11691 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11692 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11693 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11694 && (TYPE_UNSIGNED (type)
11695 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11698 /* Also optimize A % (C << N) where C is a power of 2,
11699 to A & ((C << N) - 1). */
11700 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11701 c = TREE_OPERAND (arg1, 0);
11703 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11706 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11707 build_int_cst (TREE_TYPE (arg1), 1));
11708 if (strict_overflow_p)
11709 fold_overflow_warning (("assuming signed overflow does not "
11710 "occur when simplifying "
11711 "X % (power of two)"),
11712 WARN_STRICT_OVERFLOW_MISC);
11713 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11714 fold_convert_loc (loc, type, arg0),
11715 fold_convert_loc (loc, type, mask));
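/* Illustration: "x % 8u" folds to "x & 7u", and "x % (1u << n)" folds to
   "x & ((1u << n) - 1)", replacing a modulus with a cheaper mask.  */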
11723 if (integer_all_onesp (arg0))
11724 return omit_one_operand_loc (loc, type, arg0, arg1);
11728 /* Optimize -1 >> x for arithmetic right shifts. */
11729 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11730 && tree_expr_nonnegative_p (arg1))
11731 return omit_one_operand_loc (loc, type, arg0, arg1);
11732 /* ... fall through ... */
11736 if (integer_zerop (arg1))
11737 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11738 if (integer_zerop (arg0))
11739 return omit_one_operand_loc (loc, type, arg0, arg1);
11741 /* Since a negative shift count is not well-defined,
11742 don't try to compute it in the compiler. */
11743 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11746 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11747 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11748 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11749 && host_integerp (TREE_OPERAND (arg0, 1), false)
11750 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11752 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11753 + TREE_INT_CST_LOW (arg1));
11755 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11756 being well defined. */
11757 if (low >= TYPE_PRECISION (type))
11759 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11760 low = low % TYPE_PRECISION (type);
11761 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11762 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11763 TREE_OPERAND (arg0, 0));
11765 low = TYPE_PRECISION (type) - 1;
11768 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11769 build_int_cst (type, low));
11772 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11773 into x & ((unsigned)-1 >> c) for unsigned types. */
11774 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11775 || (TYPE_UNSIGNED (type)
11776 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11777 && host_integerp (arg1, false)
11778 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11779 && host_integerp (TREE_OPERAND (arg0, 1), false)
11780 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11782 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11783 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11789 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11791 lshift = build_int_cst (type, -1);
11792 lshift = int_const_binop (code, lshift, arg1, 0);
11794 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11798 /* Rewrite an LROTATE_EXPR by a constant into an
11799 RROTATE_EXPR by a new constant. */
11800 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11802 tree tem = build_int_cst (TREE_TYPE (arg1),
11803 TYPE_PRECISION (type));
11804 tem = const_binop (MINUS_EXPR, tem, arg1);
11805 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
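/* Illustration: on a 32-bit type, a left rotate by 5 is rewritten as a right
   rotate by 27, so only one rotate form needs further handling.  */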
11808 /* If we have a rotate of a bit operation with the rotate count and
11809 the second operand of the bit operation both constant,
11810 permute the two operations. */
11811 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11812 && (TREE_CODE (arg0) == BIT_AND_EXPR
11813 || TREE_CODE (arg0) == BIT_IOR_EXPR
11814 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11815 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11816 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11817 fold_build2_loc (loc, code, type,
11818 TREE_OPERAND (arg0, 0), arg1),
11819 fold_build2_loc (loc, code, type,
11820 TREE_OPERAND (arg0, 1), arg1));
11822 /* Two consecutive rotates adding up to the precision of the
11823 type can be ignored. */
11824 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11825 && TREE_CODE (arg0) == RROTATE_EXPR
11826 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11827 && TREE_INT_CST_HIGH (arg1) == 0
11828 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11829 && ((TREE_INT_CST_LOW (arg1)
11830 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11831 == (unsigned int) TYPE_PRECISION (type)))
11832 return TREE_OPERAND (arg0, 0);
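/* Illustration: on a 32-bit type, rotating right by 10 and then by 22 rotates
   by a full 32 bits in total and therefore folds back to the original value.  */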
11834 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11835 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11836 if the latter can be further optimized. */
11837 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11838 && TREE_CODE (arg0) == BIT_AND_EXPR
11839 && TREE_CODE (arg1) == INTEGER_CST
11840 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11842 tree mask = fold_build2_loc (loc, code, type,
11843 fold_convert_loc (loc, type,
11844 TREE_OPERAND (arg0, 1)),
11846 tree shift = fold_build2_loc (loc, code, type,
11847 fold_convert_loc (loc, type,
11848 TREE_OPERAND (arg0, 0)),
11850 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11858 if (operand_equal_p (arg0, arg1, 0))
11859 return omit_one_operand_loc (loc, type, arg0, arg1);
11860 if (INTEGRAL_TYPE_P (type)
11861 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11862 return omit_one_operand_loc (loc, type, arg1, arg0);
11863 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11869 if (operand_equal_p (arg0, arg1, 0))
11870 return omit_one_operand_loc (loc, type, arg0, arg1);
11871 if (INTEGRAL_TYPE_P (type)
11872 && TYPE_MAX_VALUE (type)
11873 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11874 return omit_one_operand_loc (loc, type, arg1, arg0);
11875 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11880 case TRUTH_ANDIF_EXPR:
11881 /* Note that the operands of this must be ints
11882 and their values must be 0 or 1.
11883 ("true" is a fixed value perhaps depending on the language.) */
11884 /* If first arg is constant zero, return it. */
11885 if (integer_zerop (arg0))
11886 return fold_convert_loc (loc, type, arg0);
11887 case TRUTH_AND_EXPR:
11888 /* If either arg is constant true, drop it. */
11889 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11890 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11891 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11892 /* Preserve sequence points. */
11893 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11894 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11895 /* If second arg is constant zero, result is zero, but first arg
11896 must be evaluated. */
11897 if (integer_zerop (arg1))
11898 return omit_one_operand_loc (loc, type, arg1, arg0);
11899 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11900 case will be handled here. */
11901 if (integer_zerop (arg0))
11902 return omit_one_operand_loc (loc, type, arg0, arg1);
11904 /* !X && X is always false. */
11905 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11906 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11907 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11908 /* X && !X is always false. */
11909 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11910 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11911 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11913 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11914 means A >= Y && A != MAX, but in this case we know that A < X <= MAX.  */
11917 if (!TREE_SIDE_EFFECTS (arg0)
11918 && !TREE_SIDE_EFFECTS (arg1))
11920 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11921 if (tem && !operand_equal_p (tem, arg0, 0))
11922 return fold_build2_loc (loc, code, type, tem, arg1);
11924 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11925 if (tem && !operand_equal_p (tem, arg1, 0))
11926 return fold_build2_loc (loc, code, type, arg0, tem);
11930 /* We only do these simplifications if we are optimizing. */
11934 /* Check for things like (A || B) && (A || C). We can convert this
11935 to A || (B && C). Note that either operator can be any of the four
11936 truth and/or operations and the transformation will still be
11937 valid. Also note that we only care about order for the
11938 ANDIF and ORIF operators. If B contains side effects, this
11939 might change the truth-value of A. */
11940 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11941 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11942 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11943 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11944 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11945 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11947 tree a00 = TREE_OPERAND (arg0, 0);
11948 tree a01 = TREE_OPERAND (arg0, 1);
11949 tree a10 = TREE_OPERAND (arg1, 0);
11950 tree a11 = TREE_OPERAND (arg1, 1);
11951 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11952 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11953 && (code == TRUTH_AND_EXPR
11954 || code == TRUTH_OR_EXPR));
11956 if (operand_equal_p (a00, a10, 0))
11957 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11958 fold_build2_loc (loc, code, type, a01, a11));
11959 else if (commutative && operand_equal_p (a00, a11, 0))
11960 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11961 fold_build2_loc (loc, code, type, a01, a10));
11962 else if (commutative && operand_equal_p (a01, a10, 0))
11963 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
11964 fold_build2_loc (loc, code, type, a00, a11));
11966 /* This case is tricky because we must either have commutative
11967 operators or else A10 must not have side-effects. */
11969 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11970 && operand_equal_p (a01, a11, 0))
11971 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11972 fold_build2_loc (loc, code, type, a00, a10),
11976 /* See if we can build a range comparison. */
11977 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
11980 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
11981 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
11983 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
11985 return fold_build2_loc (loc, code, type, tem, arg1);
11988 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
11989 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
11991 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
11993 return fold_build2_loc (loc, code, type, arg0, tem);
11996 /* Check for the possibility of merging component references. If our
11997 lhs is another similar operation, try to merge its rhs with our
11998 rhs. Then try to merge our lhs and rhs. */
11999 if (TREE_CODE (arg0) == code
12000 && 0 != (tem = fold_truthop (loc, code, type,
12001 TREE_OPERAND (arg0, 1), arg1)))
12002 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12004 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12009 case TRUTH_ORIF_EXPR:
12010 /* Note that the operands of this must be ints
12011 and their values must be 0 or true.
12012 ("true" is a fixed value perhaps depending on the language.) */
12013 /* If first arg is constant true, return it. */
12014 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12015 return fold_convert_loc (loc, type, arg0);
12016 case TRUTH_OR_EXPR:
12017 /* If either arg is constant zero, drop it. */
12018 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12019 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12020 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12021 /* Preserve sequence points. */
12022 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12023 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12024 /* If second arg is constant true, result is true, but we must
12025 evaluate first arg. */
12026 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12027 return omit_one_operand_loc (loc, type, arg1, arg0);
12028 /* Likewise for first arg, but note this only occurs here for TRUTH_OR_EXPR.  */
12030 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12031 return omit_one_operand_loc (loc, type, arg0, arg1);
12033 /* !X || X is always true. */
12034 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12035 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12036 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12037 /* X || !X is always true. */
12038 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12039 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12040 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12044 case TRUTH_XOR_EXPR:
12045 /* If the second arg is constant zero, drop it. */
12046 if (integer_zerop (arg1))
12047 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12048 /* If the second arg is constant true, this is a logical inversion. */
12049 if (integer_onep (arg1))
12051 /* Only call invert_truthvalue if operand is a truth value. */
12052 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12053 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12055 tem = invert_truthvalue_loc (loc, arg0);
12056 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12058 /* Identical arguments cancel to zero. */
12059 if (operand_equal_p (arg0, arg1, 0))
12060 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12062 /* !X ^ X is always true. */
12063 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12064 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12065 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12067 /* X ^ !X is always true. */
12068 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12069 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12070 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12079 tem = fold_comparison (loc, code, type, op0, op1);
12080 if (tem != NULL_TREE)
12083 /* bool_var != 0 becomes bool_var. */
12084 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12085 && code == NE_EXPR)
12086 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12088 /* bool_var == 1 becomes bool_var. */
12089 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12090 && code == EQ_EXPR)
12091 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12093 /* bool_var != 1 becomes !bool_var. */
12094 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12095 && code == NE_EXPR)
12096 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12097 fold_convert_loc (loc, type, arg0));
12099 /* bool_var == 0 becomes !bool_var. */
12100 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12101 && code == EQ_EXPR)
12102 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12103 fold_convert_loc (loc, type, arg0));
12105 /* !exp != 0 becomes !exp */
12106 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12107 && code == NE_EXPR)
12108 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12110 /* If this is an equality comparison of the address of two non-weak,
12111 unaliased symbols neither of which are extern (since we do not
12112 have access to attributes for externs), then we know the result. */
12113 if (TREE_CODE (arg0) == ADDR_EXPR
12114 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12115 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12116 && ! lookup_attribute ("alias",
12117 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12118 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12119 && TREE_CODE (arg1) == ADDR_EXPR
12120 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12121 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12122 && ! lookup_attribute ("alias",
12123 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12124 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12126 /* We know that we're looking at the address of two
12127 non-weak, unaliased, static _DECL nodes.
12129 It is both wasteful and incorrect to call operand_equal_p
12130 to compare the two ADDR_EXPR nodes. It is wasteful in that
12131 all we need to do is test pointer equality for the arguments
12132 to the two ADDR_EXPR nodes. It is incorrect to use
12133 operand_equal_p as that function is NOT equivalent to a
12134 C equality test. It can in fact return false for two
12135 objects which would test as equal using the C equality operator.  */
12137 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12138 return constant_boolean_node (equal
12139 ? code == EQ_EXPR : code != EQ_EXPR,
12143 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12144 a MINUS_EXPR of a constant, we can convert it into a comparison with
12145 a revised constant as long as no overflow occurs. */
12146 if (TREE_CODE (arg1) == INTEGER_CST
12147 && (TREE_CODE (arg0) == PLUS_EXPR
12148 || TREE_CODE (arg0) == MINUS_EXPR)
12149 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12150 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12151 ? MINUS_EXPR : PLUS_EXPR,
12152 fold_convert_loc (loc, TREE_TYPE (arg0),
12154 TREE_OPERAND (arg0, 1)))
12155 && !TREE_OVERFLOW (tem))
12156 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12158 /* Similarly for a NEGATE_EXPR. */
12159 if (TREE_CODE (arg0) == NEGATE_EXPR
12160 && TREE_CODE (arg1) == INTEGER_CST
12161 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12163 && TREE_CODE (tem) == INTEGER_CST
12164 && !TREE_OVERFLOW (tem))
12165 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12167 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12168 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12169 && TREE_CODE (arg1) == INTEGER_CST
12170 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12171 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12172 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12173 fold_convert_loc (loc,
12176 TREE_OPERAND (arg0, 1)));
12178 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12179 if ((TREE_CODE (arg0) == PLUS_EXPR
12180 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12181 || TREE_CODE (arg0) == MINUS_EXPR)
12182 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12185 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12186 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12188 tree val = TREE_OPERAND (arg0, 1);
12189 return omit_two_operands_loc (loc, type,
12190 fold_build2_loc (loc, code, type,
12192 build_int_cst (TREE_TYPE (val),
12194 TREE_OPERAND (arg0, 0), arg1);
12197 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12198 if (TREE_CODE (arg0) == MINUS_EXPR
12199 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12200 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12203 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12205 return omit_two_operands_loc (loc, type,
12207 ? boolean_true_node : boolean_false_node,
12208 TREE_OPERAND (arg0, 1), arg1);
12211 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12212 for !=. Don't do this for ordered comparisons due to overflow. */
12213 if (TREE_CODE (arg0) == MINUS_EXPR
12214 && integer_zerop (arg1))
12215 return fold_build2_loc (loc, code, type,
12216 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
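/* Illustration: "x - y == 0" folds to "x == y"; the same is not done for
   ordered comparisons because the subtraction could overflow.  */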
12218 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12219 if (TREE_CODE (arg0) == ABS_EXPR
12220 && (integer_zerop (arg1) || real_zerop (arg1)))
12221 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12223 /* If this is an EQ or NE comparison with zero and ARG0 is
12224 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12225 two operations, but the latter can be done in one less insn
12226 on machines that have only two-operand insns or on which a
12227 constant cannot be the first operand. */
12228 if (TREE_CODE (arg0) == BIT_AND_EXPR
12229 && integer_zerop (arg1))
12231 tree arg00 = TREE_OPERAND (arg0, 0);
12232 tree arg01 = TREE_OPERAND (arg0, 1);
12233 if (TREE_CODE (arg00) == LSHIFT_EXPR
12234 && integer_onep (TREE_OPERAND (arg00, 0)))
12236 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12237 arg01, TREE_OPERAND (arg00, 1));
12238 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12239 build_int_cst (TREE_TYPE (arg0), 1));
12240 return fold_build2_loc (loc, code, type,
12241 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12244 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12245 && integer_onep (TREE_OPERAND (arg01, 0)))
12247 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12248 arg00, TREE_OPERAND (arg01, 1));
12249 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12250 build_int_cst (TREE_TYPE (arg0), 1));
12251 return fold_build2_loc (loc, code, type,
12252 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12257 /* If this is an NE or EQ comparison of zero against the result of a
12258 signed MOD operation whose second operand is a power of 2, make
12259 the MOD operation unsigned since it is simpler and equivalent. */
12260 if (integer_zerop (arg1)
12261 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12262 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12263 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12264 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12265 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12266 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12268 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12269 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12270 fold_convert_loc (loc, newtype,
12271 TREE_OPERAND (arg0, 0)),
12272 fold_convert_loc (loc, newtype,
12273 TREE_OPERAND (arg0, 1)));
12275 return fold_build2_loc (loc, code, type, newmod,
12276 fold_convert_loc (loc, newtype, arg1));
12279 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12280 C1 is a valid shift constant, and C2 is a power of two, i.e. a single bit.  */
12282 if (TREE_CODE (arg0) == BIT_AND_EXPR
12283 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12284 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12286 && integer_pow2p (TREE_OPERAND (arg0, 1))
12287 && integer_zerop (arg1))
12289 tree itype = TREE_TYPE (arg0);
12290 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12291 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12293 /* Check for a valid shift count. */
12294 if (TREE_INT_CST_HIGH (arg001) == 0
12295 && TREE_INT_CST_LOW (arg001) < prec)
12297 tree arg01 = TREE_OPERAND (arg0, 1);
12298 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12299 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12300 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12301 can be rewritten as (X & (C2 << C1)) != 0. */
12302 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12304 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12305 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12306 return fold_build2_loc (loc, code, type, tem,
12307 fold_convert_loc (loc, itype, arg1));
12309 /* Otherwise, for signed (arithmetic) shifts,
12310 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12311 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12312 else if (!TYPE_UNSIGNED (itype))
12313 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12314 arg000, build_int_cst (itype, 0));
12315 /* Otherwise, for unsigned (logical) shifts,
12316 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12317 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12319 return omit_one_operand_loc (loc, type,
12320 code == EQ_EXPR ? integer_one_node
12321 : integer_zero_node,
12326 /* If this is an NE comparison of zero with an AND of one, remove the
12327 comparison since the AND will give the correct value. */
12328 if (code == NE_EXPR
12329 && integer_zerop (arg1)
12330 && TREE_CODE (arg0) == BIT_AND_EXPR
12331 && integer_onep (TREE_OPERAND (arg0, 1)))
12332 return fold_convert_loc (loc, type, arg0);
12334 /* If we have (A & C) == C where C is a power of 2, convert this into
12335 (A & C) != 0. Similarly for NE_EXPR. */
12336 if (TREE_CODE (arg0) == BIT_AND_EXPR
12337 && integer_pow2p (TREE_OPERAND (arg0, 1))
12338 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12339 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12340 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12341 integer_zero_node));
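/* Illustration: "(x & 8) == 8" folds to "(x & 8) != 0", which usually maps
   to a single bit test.  */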
12343 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12344 bit, then fold the expression into A < 0 or A >= 0. */
12345 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12349 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12350 Similarly for NE_EXPR. */
12351 if (TREE_CODE (arg0) == BIT_AND_EXPR
12352 && TREE_CODE (arg1) == INTEGER_CST
12353 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12355 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12356 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12357 TREE_OPERAND (arg0, 1));
12359 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12360 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12362 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12363 if (integer_nonzerop (dandnotc))
12364 return omit_one_operand_loc (loc, type, rslt, arg0);
12367 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12368 Similarly for NE_EXPR. */
12369 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12370 && TREE_CODE (arg1) == INTEGER_CST
12371 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12373 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12375 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12376 TREE_OPERAND (arg0, 1),
12377 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12378 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12379 if (integer_nonzerop (candnotd))
12380 return omit_one_operand_loc (loc, type, rslt, arg0);
12383 /* If this is a comparison of a field, we may be able to simplify it. */
12384 if ((TREE_CODE (arg0) == COMPONENT_REF
12385 || TREE_CODE (arg0) == BIT_FIELD_REF)
12386 /* Handle the constant case even without -O
12387 to make sure the warnings are given. */
12388 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12390 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12395 /* Optimize comparisons of strlen vs zero to a compare of the
12396 first character of the string vs zero. To wit,
12397 strlen(ptr) == 0 => *ptr == 0
12398 strlen(ptr) != 0 => *ptr != 0
12399 Other cases should reduce to one of these two (or a constant)
12400 due to the return value of strlen being unsigned. */
12401 if (TREE_CODE (arg0) == CALL_EXPR
12402 && integer_zerop (arg1))
12404 tree fndecl = get_callee_fndecl (arg0);
12407 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12408 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12409 && call_expr_nargs (arg0) == 1
12410 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12412 tree iref = build_fold_indirect_ref_loc (loc,
12413 CALL_EXPR_ARG (arg0, 0));
12414 return fold_build2_loc (loc, code, type, iref,
12415 build_int_cst (TREE_TYPE (iref), 0));
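/* Illustration: "strlen (p) == 0" folds to "*p == 0", avoiding the call
   entirely when only emptiness is being tested.  */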
12419 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12420 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12421 if (TREE_CODE (arg0) == RSHIFT_EXPR
12422 && integer_zerop (arg1)
12423 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12425 tree arg00 = TREE_OPERAND (arg0, 0);
12426 tree arg01 = TREE_OPERAND (arg0, 1);
12427 tree itype = TREE_TYPE (arg00);
12428 if (TREE_INT_CST_HIGH (arg01) == 0
12429 && TREE_INT_CST_LOW (arg01)
12430 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12432 if (TYPE_UNSIGNED (itype))
12434 itype = signed_type_for (itype);
12435 arg00 = fold_convert_loc (loc, itype, arg00);
12437 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12438 type, arg00, build_int_cst (itype, 0));
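/* Illustration: for a 32-bit signed x, "(x >> 31) != 0" folds to "x < 0"
   and "(x >> 31) == 0" folds to "x >= 0".  */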
12442 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12443 if (integer_zerop (arg1)
12444 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12445 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12446 TREE_OPERAND (arg0, 1));
12448 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12449 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12450 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12451 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12452 build_int_cst (TREE_TYPE (arg0), 0));
12453 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12454 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12455 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12456 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12457 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12458 build_int_cst (TREE_TYPE (arg0), 0));
12460 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12461 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12462 && TREE_CODE (arg1) == INTEGER_CST
12463 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12464 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12465 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12466 TREE_OPERAND (arg0, 1), arg1));
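/* Illustration: "(x ^ 3) == 5" folds to "x == 6", since 3 ^ 5 == 6.  */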
12468 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12469 (X & C) == 0 when C is a single bit. */
12470 if (TREE_CODE (arg0) == BIT_AND_EXPR
12471 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12472 && integer_zerop (arg1)
12473 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12475 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12476 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12477 TREE_OPERAND (arg0, 1));
12478 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12480 fold_convert_loc (loc, TREE_TYPE (arg0),
12484 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12485 constant C is a power of two, i.e. a single bit. */
12486 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12487 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12488 && integer_zerop (arg1)
12489 && integer_pow2p (TREE_OPERAND (arg0, 1))
12490 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12491 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12493 tree arg00 = TREE_OPERAND (arg0, 0);
12494 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12495 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12498 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12499 when C is a power of two, i.e. a single bit.  */
12500 if (TREE_CODE (arg0) == BIT_AND_EXPR
12501 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12502 && integer_zerop (arg1)
12503 && integer_pow2p (TREE_OPERAND (arg0, 1))
12504 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12505 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12507 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12508 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12509 arg000, TREE_OPERAND (arg0, 1));
12510 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12511 tem, build_int_cst (TREE_TYPE (tem), 0));
12514 if (integer_zerop (arg1)
12515 && tree_expr_nonzero_p (arg0))
12517 tree res = constant_boolean_node (code==NE_EXPR, type);
12518 return omit_one_operand_loc (loc, type, res, arg0);
12521 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12522 if (TREE_CODE (arg0) == NEGATE_EXPR
12523 && TREE_CODE (arg1) == NEGATE_EXPR)
12524 return fold_build2_loc (loc, code, type,
12525 TREE_OPERAND (arg0, 0),
12526 fold_convert_loc (loc, TREE_TYPE (arg0),
12527 TREE_OPERAND (arg1, 0)));
12529 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
12530 if (TREE_CODE (arg0) == BIT_AND_EXPR
12531 && TREE_CODE (arg1) == BIT_AND_EXPR)
12533 tree arg00 = TREE_OPERAND (arg0, 0);
12534 tree arg01 = TREE_OPERAND (arg0, 1);
12535 tree arg10 = TREE_OPERAND (arg1, 0);
12536 tree arg11 = TREE_OPERAND (arg1, 1);
12537 tree itype = TREE_TYPE (arg0);
12539 if (operand_equal_p (arg01, arg11, 0))
12540 return fold_build2_loc (loc, code, type,
12541 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12542 fold_build2_loc (loc,
12543 BIT_XOR_EXPR, itype,
12546 build_int_cst (itype, 0));
12548 if (operand_equal_p (arg01, arg10, 0))
12549 return fold_build2_loc (loc, code, type,
12550 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12551 fold_build2_loc (loc,
12552 BIT_XOR_EXPR, itype,
12555 build_int_cst (itype, 0));
12557 if (operand_equal_p (arg00, arg11, 0))
12558 return fold_build2_loc (loc, code, type,
12559 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12560 fold_build2_loc (loc,
12561 BIT_XOR_EXPR, itype,
12564 build_int_cst (itype, 0));
12566 if (operand_equal_p (arg00, arg10, 0))
12567 return fold_build2_loc (loc, code, type,
12568 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12569 fold_build2_loc (loc,
12570 BIT_XOR_EXPR, itype,
12573 build_int_cst (itype, 0));
12576 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12577 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12579 tree arg00 = TREE_OPERAND (arg0, 0);
12580 tree arg01 = TREE_OPERAND (arg0, 1);
12581 tree arg10 = TREE_OPERAND (arg1, 0);
12582 tree arg11 = TREE_OPERAND (arg1, 1);
12583 tree itype = TREE_TYPE (arg0);
12585 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12586 operand_equal_p guarantees no side-effects so we don't need
12587 to use omit_one_operand on Z. */
12588 if (operand_equal_p (arg01, arg11, 0))
12589 return fold_build2_loc (loc, code, type, arg00,
12590 fold_convert_loc (loc, TREE_TYPE (arg00),
12592 if (operand_equal_p (arg01, arg10, 0))
12593 return fold_build2_loc (loc, code, type, arg00,
12594 fold_convert_loc (loc, TREE_TYPE (arg00),
12596 if (operand_equal_p (arg00, arg11, 0))
12597 return fold_build2_loc (loc, code, type, arg01,
12598 fold_convert_loc (loc, TREE_TYPE (arg01),
12600 if (operand_equal_p (arg00, arg10, 0))
12601 return fold_build2_loc (loc, code, type, arg01,
12602 fold_convert_loc (loc, TREE_TYPE (arg01),
12605 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12606 if (TREE_CODE (arg01) == INTEGER_CST
12607 && TREE_CODE (arg11) == INTEGER_CST)
12609 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12610 fold_convert_loc (loc, itype, arg11));
12611 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12612 return fold_build2_loc (loc, code, type, tem,
12613 fold_convert_loc (loc, itype, arg10));
12617 /* Attempt to simplify equality/inequality comparisons of complex
12618 values. Only lower the comparison if the result is known or
12619 can be simplified to a single scalar comparison. */
12620 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12621 || TREE_CODE (arg0) == COMPLEX_CST)
12622 && (TREE_CODE (arg1) == COMPLEX_EXPR
12623 || TREE_CODE (arg1) == COMPLEX_CST))
12625 tree real0, imag0, real1, imag1;
12628 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12630 real0 = TREE_OPERAND (arg0, 0);
12631 imag0 = TREE_OPERAND (arg0, 1);
12635 real0 = TREE_REALPART (arg0);
12636 imag0 = TREE_IMAGPART (arg0);
12639 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12641 real1 = TREE_OPERAND (arg1, 0);
12642 imag1 = TREE_OPERAND (arg1, 1);
12646 real1 = TREE_REALPART (arg1);
12647 imag1 = TREE_IMAGPART (arg1);
12650 rcond = fold_binary_loc (loc, code, type, real0, real1);
12651 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12653 if (integer_zerop (rcond))
12655 if (code == EQ_EXPR)
12656 return omit_two_operands_loc (loc, type, boolean_false_node,
12658 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12662 if (code == NE_EXPR)
12663 return omit_two_operands_loc (loc, type, boolean_true_node,
12665 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12669 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12670 if (icond && TREE_CODE (icond) == INTEGER_CST)
12672 if (integer_zerop (icond))
12674 if (code == EQ_EXPR)
12675 return omit_two_operands_loc (loc, type, boolean_false_node,
12677 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12681 if (code == NE_EXPR)
12682 return omit_two_operands_loc (loc, type, boolean_true_node,
12684 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12695 tem = fold_comparison (loc, code, type, op0, op1);
12696 if (tem != NULL_TREE)
12699 /* Transform comparisons of the form X +- C CMP X. */
12700 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12701 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12702 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12703 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12704 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12705 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12707 tree arg01 = TREE_OPERAND (arg0, 1);
12708 enum tree_code code0 = TREE_CODE (arg0);
12711 if (TREE_CODE (arg01) == REAL_CST)
12712 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12714 is_positive = tree_int_cst_sgn (arg01);
12716 /* (X - c) > X becomes false. */
12717 if (code == GT_EXPR
12718 && ((code0 == MINUS_EXPR && is_positive >= 0)
12719 || (code0 == PLUS_EXPR && is_positive <= 0)))
12721 if (TREE_CODE (arg01) == INTEGER_CST
12722 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12723 fold_overflow_warning (("assuming signed overflow does not "
12724 "occur when assuming that (X - c) > X "
12725 "is always false"),
12726 WARN_STRICT_OVERFLOW_ALL);
12727 return constant_boolean_node (0, type);
12730 /* Likewise (X + c) < X becomes false. */
12731 if (code == LT_EXPR
12732 && ((code0 == PLUS_EXPR && is_positive >= 0)
12733 || (code0 == MINUS_EXPR && is_positive <= 0)))
12735 if (TREE_CODE (arg01) == INTEGER_CST
12736 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12737 fold_overflow_warning (("assuming signed overflow does not "
12738 "occur when assuming that "
12739 "(X + c) < X is always false"),
12740 WARN_STRICT_OVERFLOW_ALL);
12741 return constant_boolean_node (0, type);
12744 /* Convert (X - c) <= X to true. */
12745 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12747 && ((code0 == MINUS_EXPR && is_positive >= 0)
12748 || (code0 == PLUS_EXPR && is_positive <= 0)))
12750 if (TREE_CODE (arg01) == INTEGER_CST
12751 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12752 fold_overflow_warning (("assuming signed overflow does not "
12753 "occur when assuming that "
12754 "(X - c) <= X is always true"),
12755 WARN_STRICT_OVERFLOW_ALL);
12756 return constant_boolean_node (1, type);
12759 /* Convert (X + c) >= X to true. */
12760 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12762 && ((code0 == PLUS_EXPR && is_positive >= 0)
12763 || (code0 == MINUS_EXPR && is_positive <= 0)))
12765 if (TREE_CODE (arg01) == INTEGER_CST
12766 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12767 fold_overflow_warning (("assuming signed overflow does not "
12768 "occur when assuming that "
12769 "(X + c) >= X is always true"),
12770 WARN_STRICT_OVERFLOW_ALL);
12771 return constant_boolean_node (1, type);
12774 if (TREE_CODE (arg01) == INTEGER_CST)
12776 /* Convert X + c > X and X - c < X to true for integers. */
12777 if (code == GT_EXPR
12778 && ((code0 == PLUS_EXPR && is_positive > 0)
12779 || (code0 == MINUS_EXPR && is_positive < 0)))
12781 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12782 fold_overflow_warning (("assuming signed overflow does "
12783 "not occur when assuming that "
12784 "(X + c) > X is always true"),
12785 WARN_STRICT_OVERFLOW_ALL);
12786 return constant_boolean_node (1, type);
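/* Illustration: for signed integers, "x + 1 > x" folds to true here under the
   assumption that signed overflow does not occur (hence the
   fold_overflow_warning call above).  */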
12789 if (code == LT_EXPR
12790 && ((code0 == MINUS_EXPR && is_positive > 0)
12791 || (code0 == PLUS_EXPR && is_positive < 0)))
12793 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12794 fold_overflow_warning (("assuming signed overflow does "
12795 "not occur when assuming that "
12796 "(X - c) < X is always true"),
12797 WARN_STRICT_OVERFLOW_ALL);
12798 return constant_boolean_node (1, type);
12801 /* Convert X + c <= X and X - c >= X to false for integers. */
12802 if (code == LE_EXPR
12803 && ((code0 == PLUS_EXPR && is_positive > 0)
12804 || (code0 == MINUS_EXPR && is_positive < 0)))
12806 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12807 fold_overflow_warning (("assuming signed overflow does "
12808 "not occur when assuming that "
12809 "(X + c) <= X is always false"),
12810 WARN_STRICT_OVERFLOW_ALL);
12811 return constant_boolean_node (0, type);
12814 if (code == GE_EXPR
12815 && ((code0 == MINUS_EXPR && is_positive > 0)
12816 || (code0 == PLUS_EXPR && is_positive < 0)))
12818 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12819 fold_overflow_warning (("assuming signed overflow does "
12820 "not occur when assuming that "
12821 "(X - c) >= X is always false"),
12822 WARN_STRICT_OVERFLOW_ALL);
12823 return constant_boolean_node (0, type);
12828 /* Comparisons with the highest or lowest possible integer of
12829 the specified precision will have known values. */
12831 tree arg1_type = TREE_TYPE (arg1);
12832 unsigned int width = TYPE_PRECISION (arg1_type);
12834 if (TREE_CODE (arg1) == INTEGER_CST
12835 && width <= 2 * HOST_BITS_PER_WIDE_INT
12836 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12838 HOST_WIDE_INT signed_max_hi;
12839 unsigned HOST_WIDE_INT signed_max_lo;
12840 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12842 if (width <= HOST_BITS_PER_WIDE_INT)
12844 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12849 if (TYPE_UNSIGNED (arg1_type))
12851 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12857 max_lo = signed_max_lo;
12858 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12864 width -= HOST_BITS_PER_WIDE_INT;
12865 signed_max_lo = -1;
12866 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12871 if (TYPE_UNSIGNED (arg1_type))
12873 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12878 max_hi = signed_max_hi;
12879 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12883 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12884 && TREE_INT_CST_LOW (arg1) == max_lo)
12888 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12891 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12894 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12897 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12899 /* The GE_EXPR and LT_EXPR cases above are not normally
12900 reached because of previous transformations. */
12905 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12907 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12911 arg1 = const_binop (PLUS_EXPR, arg1,
12912 build_int_cst (TREE_TYPE (arg1), 1));
12913 return fold_build2_loc (loc, EQ_EXPR, type,
12914 fold_convert_loc (loc,
12915 TREE_TYPE (arg1), arg0),
12918 arg1 = const_binop (PLUS_EXPR, arg1,
12919 build_int_cst (TREE_TYPE (arg1), 1));
12920 return fold_build2_loc (loc, NE_EXPR, type,
12921 fold_convert_loc (loc, TREE_TYPE (arg1),
12927 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12929 && TREE_INT_CST_LOW (arg1) == min_lo)
12933 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12936 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12939 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12942 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12947 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12949 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12953 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12954 return fold_build2_loc (loc, NE_EXPR, type,
12955 fold_convert_loc (loc,
12956 TREE_TYPE (arg1), arg0),
12959 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12960 return fold_build2_loc (loc, EQ_EXPR, type,
12961 fold_convert_loc (loc, TREE_TYPE (arg1),
12968 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12969 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12970 && TYPE_UNSIGNED (arg1_type)
12971 /* We will flip the signedness of the comparison operator
12972 associated with the mode of arg1, so the sign bit is
12973 specified by this mode. Check that arg1 is the signed
12974 max associated with this sign bit. */
12975 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12976 /* signed_type does not work on pointer types. */
12977 && INTEGRAL_TYPE_P (arg1_type))
12979 /* The following case also applies to X < signed_max+1
12980 and X >= signed_max+1 because of previous transformations.  */
12981 if (code == LE_EXPR || code == GT_EXPR)
12984 st = signed_type_for (TREE_TYPE (arg1));
12985 return fold_build2_loc (loc,
12986 code == LE_EXPR ? GE_EXPR : LT_EXPR,
12987 type, fold_convert_loc (loc, st, arg0),
12988 build_int_cst (st, 0));
12994 /* If we are comparing an ABS_EXPR with a constant, we can
12995 convert all the cases into explicit comparisons, but they may
12996 well not be faster than doing the ABS and one comparison.
12997 But ABS (X) <= C is a range comparison, which becomes a subtraction
12998 and a comparison, and is probably faster. */
12999 if (code == LE_EXPR
13000 && TREE_CODE (arg1) == INTEGER_CST
13001 && TREE_CODE (arg0) == ABS_EXPR
13002 && ! TREE_SIDE_EFFECTS (arg0)
13003 && (0 != (tem = negate_expr (arg1)))
13004 && TREE_CODE (tem) == INTEGER_CST
13005 && !TREE_OVERFLOW (tem))
13006 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13007 build2 (GE_EXPR, type,
13008 TREE_OPERAND (arg0, 0), tem),
13009 build2 (LE_EXPR, type,
13010 TREE_OPERAND (arg0, 0), arg1));
13012 /* Convert ABS_EXPR<x> >= 0 to true. */
13013 strict_overflow_p = false;
13014 if (code == GE_EXPR
13015 && (integer_zerop (arg1)
13016 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13017 && real_zerop (arg1)))
13018 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13020 if (strict_overflow_p)
13021 fold_overflow_warning (("assuming signed overflow does not occur "
13022 "when simplifying comparison of "
13023 "absolute value and zero"),
13024 WARN_STRICT_OVERFLOW_CONDITIONAL);
13025 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13028 /* Convert ABS_EXPR<x> < 0 to false. */
13029 strict_overflow_p = false;
13030 if (code == LT_EXPR
13031 && (integer_zerop (arg1) || real_zerop (arg1))
13032 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13034 if (strict_overflow_p)
13035 fold_overflow_warning (("assuming signed overflow does not occur "
13036 "when simplifying comparison of "
13037 "absolute value and zero"),
13038 WARN_STRICT_OVERFLOW_CONDITIONAL);
13039 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13042 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13043 and similarly for >= into !=. */
13044 if ((code == LT_EXPR || code == GE_EXPR)
13045 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13046 && TREE_CODE (arg1) == LSHIFT_EXPR
13047 && integer_onep (TREE_OPERAND (arg1, 0)))
13048 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13049 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13050 TREE_OPERAND (arg1, 1)),
13051 build_int_cst (TREE_TYPE (arg0), 0));
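/* Illustration: for unsigned x, "x < (1 << y)" folds to "(x >> y) == 0"
   and "x >= (1 << y)" folds to "(x >> y) != 0".  */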
13053 if ((code == LT_EXPR || code == GE_EXPR)
13054 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13055 && CONVERT_EXPR_P (arg1)
13056 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13057 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13059 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13060 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13061 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13062 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13063 build_int_cst (TREE_TYPE (arg0), 0));
13068 case UNORDERED_EXPR:
13076 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13078 t1 = fold_relational_const (code, type, arg0, arg1);
13079 if (t1 != NULL_TREE)
13083 /* If the first operand is NaN, the result is constant. */
13084 if (TREE_CODE (arg0) == REAL_CST
13085 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13086 && (code != LTGT_EXPR || ! flag_trapping_math))
13088 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13089 ? integer_zero_node
13090 : integer_one_node;
13091 return omit_one_operand_loc (loc, type, t1, arg1);
13094 /* If the second operand is NaN, the result is constant. */
13095 if (TREE_CODE (arg1) == REAL_CST
13096 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13097 && (code != LTGT_EXPR || ! flag_trapping_math))
13099 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13100 ? integer_zero_node
13101 : integer_one_node;
13102 return omit_one_operand_loc (loc, type, t1, arg0);
13105 /* Simplify unordered comparison of something with itself. */
13106 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13107 && operand_equal_p (arg0, arg1, 0))
13108 return constant_boolean_node (1, type);
13110 if (code == LTGT_EXPR
13111 && !flag_trapping_math
13112 && operand_equal_p (arg0, arg1, 0))
13113 return constant_boolean_node (0, type);
13115 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13117 tree targ0 = strip_float_extensions (arg0);
13118 tree targ1 = strip_float_extensions (arg1);
13119 tree newtype = TREE_TYPE (targ0);
13121 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13122 newtype = TREE_TYPE (targ1);
13124 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13125 return fold_build2_loc (loc, code, type,
13126 fold_convert_loc (loc, newtype, targ0),
13127 fold_convert_loc (loc, newtype, targ1));
13132 case COMPOUND_EXPR:
13133 /* When pedantic, a compound expression can be neither an lvalue
13134 nor an integer constant expression. */
13135 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13137 /* Don't let (0, 0) be a null pointer constant.  */
13138 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13139 : fold_convert_loc (loc, type, arg1);
13140 return pedantic_non_lvalue_loc (loc, tem);
13143 if ((TREE_CODE (arg0) == REAL_CST
13144 && TREE_CODE (arg1) == REAL_CST)
13145 || (TREE_CODE (arg0) == INTEGER_CST
13146 && TREE_CODE (arg1) == INTEGER_CST))
13147 return build_complex (type, arg0, arg1);
13148 if (TREE_CODE (arg0) == REALPART_EXPR
13149 && TREE_CODE (arg1) == IMAGPART_EXPR
13150 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 0)))
13151 == TYPE_MAIN_VARIANT (type))
13152 && operand_equal_p (TREE_OPERAND (arg0, 0),
13153 TREE_OPERAND (arg1, 0), 0))
13154 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13155 TREE_OPERAND (arg1, 0));
13159 /* An ASSERT_EXPR should never be passed to fold_binary. */
13160 gcc_unreachable ();
13164 } /* switch (code) */
13167 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13168 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees of GOTO_EXPR.  */
13172 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13174 switch (TREE_CODE (*tp))
13180 *walk_subtrees = 0;
13182 /* ... fall through ... */
13189 /* Return whether the sub-tree ST contains a label which is accessible from
13190 outside the sub-tree. */
13193 contains_label_p (tree st)
13196 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13199 /* Fold a ternary expression of code CODE and type TYPE with operands
13200 OP0, OP1, and OP2. Return the folded expression if folding is
13201 successful. Otherwise, return NULL_TREE. */
13204 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13205 tree op0, tree op1, tree op2)
13208 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13209 enum tree_code_class kind = TREE_CODE_CLASS (code);
13211 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13212 && TREE_CODE_LENGTH (code) == 3);
13214 /* Strip any conversions that don't change the mode. This is safe
13215 for every expression, except for a comparison expression because
13216 its signedness is derived from its operands. So, in the latter
13217 case, only strip conversions that don't change the signedness.
13219 Note that this is done as an internal manipulation within the
13220 constant folder, in order to find the simplest representation of
13221 the arguments so that their form can be studied. In any case,
13222 the appropriate type conversions should be put back in the tree
13223 that will get out of the constant folder. */
13244 case COMPONENT_REF:
13245 if (TREE_CODE (arg0) == CONSTRUCTOR
13246 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13248 unsigned HOST_WIDE_INT idx;
13250 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13257 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13258 so all simple results must be passed through pedantic_non_lvalue. */
13259 if (TREE_CODE (arg0) == INTEGER_CST)
13261 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13262 tem = integer_zerop (arg0) ? op2 : op1;
13263 /* Only optimize constant conditions when the selected branch
13264 has the same type as the COND_EXPR. This avoids optimizing
13265 away "c ? x : throw", where the throw has a void type.
13266 Avoid throwing away that operand which contains a label.  */
13267 if ((!TREE_SIDE_EFFECTS (unused_op)
13268 || !contains_label_p (unused_op))
13269 && (! VOID_TYPE_P (TREE_TYPE (tem))
13270 || VOID_TYPE_P (type)))
13271 return pedantic_non_lvalue_loc (loc, tem);
13274 if (operand_equal_p (arg1, op2, 0))
13275 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13277 /* If we have A op B ? A : C, we may be able to convert this to a
13278 simpler expression, depending on the operation and the values
13279 of B and C. Signed zeros prevent all of these transformations,
13280 for reasons given above each one.
13282 Also try swapping the arguments and inverting the conditional. */
13283 if (COMPARISON_CLASS_P (arg0)
13284 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13285 arg1, TREE_OPERAND (arg0, 1))
13286 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13288 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13293 if (COMPARISON_CLASS_P (arg0)
13294 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13296 TREE_OPERAND (arg0, 1))
13297 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13299 location_t loc0 = EXPR_LOCATION (arg0);
13300 if (loc0 == UNKNOWN_LOCATION)
13302 tem = fold_truth_not_expr (loc0, arg0);
13303 if (tem && COMPARISON_CLASS_P (tem))
13305 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13311 /* If the second operand is simpler than the third, swap them
13312 since that produces better jump optimization results. */
13313 if (truth_value_p (TREE_CODE (arg0))
13314 && tree_swap_operands_p (op1, op2, false))
13316 location_t loc0 = EXPR_LOCATION (arg0);
13317 if (loc0 == UNKNOWN_LOCATION)
13319 /* See if this can be inverted. If it can't, possibly because
13320 it was a floating-point inequality comparison, don't do anything.  */
13322 tem = fold_truth_not_expr (loc0, arg0);
13324 return fold_build3_loc (loc, code, type, tem, op2, op1);
13327 /* Convert A ? 1 : 0 to simply A. */
13328 if (integer_onep (op1)
13329 && integer_zerop (op2)
13330 /* If we try to convert OP0 to our type, the
13331 call to fold will try to move the conversion inside
13332 a COND, which will recurse. In that case, the COND_EXPR
13333 is probably the best choice, so leave it alone. */
13334 && type == TREE_TYPE (arg0))
13335 return pedantic_non_lvalue_loc (loc, arg0);
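/* Illustration: "a ? 1 : 0" folds to plain "a" when the result type already
   matches the type of a.  */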
13337 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13338 over COND_EXPR in cases such as floating point comparisons. */
13339 if (integer_zerop (op1)
13340 && integer_onep (op2)
13341 && truth_value_p (TREE_CODE (arg0)))
13342 return pedantic_non_lvalue_loc (loc,
13343 fold_convert_loc (loc, type,
13344 invert_truthvalue_loc (loc,
13347 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13348 if (TREE_CODE (arg0) == LT_EXPR
13349 && integer_zerop (TREE_OPERAND (arg0, 1))
13350 && integer_zerop (op2)
13351 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13353 /* sign_bit_p only checks ARG1 bits within A's precision.
13354 If <sign bit of A> has wider type than A, bits outside
13355 of A's precision in <sign bit of A> need to be checked.
13356 If they are all 0, this optimization needs to be done
13357 in unsigned A's type; if they are all 1, in signed A's type;
13358 otherwise this can't be done.  */
13359 if (TYPE_PRECISION (TREE_TYPE (tem))
13360 < TYPE_PRECISION (TREE_TYPE (arg1))
13361 && TYPE_PRECISION (TREE_TYPE (tem))
13362 < TYPE_PRECISION (type))
13364 unsigned HOST_WIDE_INT mask_lo;
13365 HOST_WIDE_INT mask_hi;
13366 int inner_width, outer_width;
13369 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13370 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13371 if (outer_width > TYPE_PRECISION (type))
13372 outer_width = TYPE_PRECISION (type);
13374 if (outer_width > HOST_BITS_PER_WIDE_INT)
13376 mask_hi = ((unsigned HOST_WIDE_INT) -1
13377 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13383 mask_lo = ((unsigned HOST_WIDE_INT) -1
13384 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13386 if (inner_width > HOST_BITS_PER_WIDE_INT)
13388 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13389 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13393 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13394 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13396 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13397 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13399 tem_type = signed_type_for (TREE_TYPE (tem));
13400 tem = fold_convert_loc (loc, tem_type, tem);
13402 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13403 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13405 tem_type = unsigned_type_for (TREE_TYPE (tem));
13406 tem = fold_convert_loc (loc, tem_type, tem);
13414 fold_convert_loc (loc, type,
13415 fold_build2_loc (loc, BIT_AND_EXPR,
13416 TREE_TYPE (tem), tem,
13417 fold_convert_loc (loc,
13422 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13423 already handled above. */
13424 if (TREE_CODE (arg0) == BIT_AND_EXPR
13425 && integer_onep (TREE_OPERAND (arg0, 1))
13426 && integer_zerop (op2)
13427 && integer_pow2p (arg1))
13429 tree tem = TREE_OPERAND (arg0, 0);
13431 if (TREE_CODE (tem) == RSHIFT_EXPR
13432 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13433 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13434 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13435 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13436 TREE_OPERAND (tem, 0), arg1);
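/* Illustrative instance of the rule above, with N == 3:
   "(a >> 3) & 1 ? 8 : 0" folds to "a & 8", since the selected
   constant 8 == 1 << 3 matches the shift count.  */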
13439 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13440 is probably obsolete because the first operand should be a
13441 truth value (that's why we have the two cases above), but let's
13442 leave it in until we can confirm this for all front-ends. */
13443 if (integer_zerop (op2)
13444 && TREE_CODE (arg0) == NE_EXPR
13445 && integer_zerop (TREE_OPERAND (arg0, 1))
13446 && integer_pow2p (arg1)
13447 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13448 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13449 arg1, OEP_ONLY_CONST))
13450 return pedantic_non_lvalue_loc (loc,
13451 fold_convert_loc (loc, type,
13452 TREE_OPERAND (arg0, 0)));
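/* Illustrative instance of the rule above: "(a & 4) != 0 ? 4 : 0"
   folds to "(type) (a & 4)", since 4 is a power of two and the mask
   equals the selected value.  */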
13454 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13455 if (integer_zerop (op2)
13456 && truth_value_p (TREE_CODE (arg0))
13457 && truth_value_p (TREE_CODE (arg1)))
13458 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13459 fold_convert_loc (loc, type, arg0),
13462 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13463 if (integer_onep (op2)
13464 && truth_value_p (TREE_CODE (arg0))
13465 && truth_value_p (TREE_CODE (arg1)))
13467 location_t loc0 = EXPR_LOCATION (arg0);
13468 if (loc0 == UNKNOWN_LOCATION)
13470 /* Only perform transformation if ARG0 is easily inverted. */
13471 tem = fold_truth_not_expr (loc0, arg0);
13473 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13474 fold_convert_loc (loc, type, tem),
13478 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13479 if (integer_zerop (arg1)
13480 && truth_value_p (TREE_CODE (arg0))
13481 && truth_value_p (TREE_CODE (op2)))
13483 location_t loc0 = EXPR_LOCATION (arg0);
13484 if (loc0 == UNKNOWN_LOCATION)
13486 /* Only perform transformation if ARG0 is easily inverted. */
13487 tem = fold_truth_not_expr (loc0, arg0);
13489 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13490 fold_convert_loc (loc, type, tem),
13494 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13495 if (integer_onep (arg1)
13496 && truth_value_p (TREE_CODE (arg0))
13497 && truth_value_p (TREE_CODE (op2)))
13498 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13499 fold_convert_loc (loc, type, arg0),
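/* Taken together, the four rules above rewrite, for truth-valued
   A and B:
     A ? B : 0  =>  A && B
     A ? B : 1  =>  !A || B
     A ? 0 : B  =>  !A && B
     A ? 1 : B  =>  A || B
   where the middle two are only done when A is easily inverted.  */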
13505 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13506 of fold_ternary on them. */
13507 gcc_unreachable ();
13509 case BIT_FIELD_REF:
13510 if ((TREE_CODE (arg0) == VECTOR_CST
13511 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13512 && type == TREE_TYPE (TREE_TYPE (arg0)))
13514 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13515 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13518 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13519 && (idx % width) == 0
13520 && (idx = idx / width)
13521 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13523 tree elements = NULL_TREE;
13525 if (TREE_CODE (arg0) == VECTOR_CST)
13526 elements = TREE_VECTOR_CST_ELTS (arg0);
13529 unsigned HOST_WIDE_INT idx;
13532 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13533 elements = tree_cons (NULL_TREE, value, elements);
13535 while (idx-- > 0 && elements)
13536 elements = TREE_CHAIN (elements);
13538 return TREE_VALUE (elements);
13540 return build_zero_cst (type);
13544 /* A bit-field-ref that referenced the full argument can be stripped. */
13545 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13546 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13547 && integer_zerop (op2))
13548 return fold_convert_loc (loc, type, arg0);
13553 /* For integers we can decompose the FMA if possible. */
13554 if (TREE_CODE (arg0) == INTEGER_CST
13555 && TREE_CODE (arg1) == INTEGER_CST)
13556 return fold_build2_loc (loc, PLUS_EXPR, type,
13557 const_binop (MULT_EXPR, arg0, arg1), arg2);
13558 if (integer_zerop (arg2))
13559 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13561 return fold_fma (loc, type, arg0, arg1, arg2);
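/* Illustrative instances of the FMA_EXPR folds above:
   FMA_EXPR <2, 3, x> becomes 6 + x (the all-constant multiply is
   folded), and FMA_EXPR <a, b, 0> becomes a * b (the zero addend
   is dropped).  */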
13565 } /* switch (code) */
13568 /* Perform constant folding and related simplification of EXPR.
13569 The related simplifications include x*1 => x, x*0 => 0, etc.,
13570 and application of the associative law.
13571 NOP_EXPR conversions may be removed freely (as long as we
13572 are careful not to change the type of the overall expression).
13573 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13574 but we can constant-fold them if they have constant operands. */
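/* A minimal usage sketch (illustrative only): callers typically build
   a tree and hand it to fold, keeping the original when nothing
   simplifies, e.g.

     tree sum = build2 (PLUS_EXPR, integer_type_node,
                        build_int_cst (integer_type_node, 2),
                        build_int_cst (integer_type_node, 3));
     sum = fold (sum);

   after which SUM is an INTEGER_CST with value 5.  In practice the
   fold_buildN_loc wrappers defined further below are preferred, since
   they fold as each node is built.  */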
13576 #ifdef ENABLE_FOLD_CHECKING
13577 # define fold(x) fold_1 (x)
13578 static tree fold_1 (tree);
13584 const tree t = expr;
13585 enum tree_code code = TREE_CODE (t);
13586 enum tree_code_class kind = TREE_CODE_CLASS (code);
13588 location_t loc = EXPR_LOCATION (expr);
13590 /* Return right away if a constant. */
13591 if (kind == tcc_constant)
13594 /* CALL_EXPR-like objects with variable numbers of operands are
13595 treated specially. */
13596 if (kind == tcc_vl_exp)
13598 if (code == CALL_EXPR)
13600 tem = fold_call_expr (loc, expr, false);
13601 return tem ? tem : expr;
13606 if (IS_EXPR_CODE_CLASS (kind))
13608 tree type = TREE_TYPE (t);
13609 tree op0, op1, op2;
13611 switch (TREE_CODE_LENGTH (code))
13614 op0 = TREE_OPERAND (t, 0);
13615 tem = fold_unary_loc (loc, code, type, op0);
13616 return tem ? tem : expr;
13618 op0 = TREE_OPERAND (t, 0);
13619 op1 = TREE_OPERAND (t, 1);
13620 tem = fold_binary_loc (loc, code, type, op0, op1);
13621 return tem ? tem : expr;
13623 op0 = TREE_OPERAND (t, 0);
13624 op1 = TREE_OPERAND (t, 1);
13625 op2 = TREE_OPERAND (t, 2);
13626 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13627 return tem ? tem : expr;
13637 tree op0 = TREE_OPERAND (t, 0);
13638 tree op1 = TREE_OPERAND (t, 1);
13640 if (TREE_CODE (op1) == INTEGER_CST
13641 && TREE_CODE (op0) == CONSTRUCTOR
13642 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13644 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13645 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13646 unsigned HOST_WIDE_INT begin = 0;
13648 /* Find a matching index by means of a binary search. */
13649 while (begin != end)
13651 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13652 tree index = VEC_index (constructor_elt, elts, middle)->index;
13654 if (TREE_CODE (index) == INTEGER_CST
13655 && tree_int_cst_lt (index, op1))
13656 begin = middle + 1;
13657 else if (TREE_CODE (index) == INTEGER_CST
13658 && tree_int_cst_lt (op1, index))
13660 else if (TREE_CODE (index) == RANGE_EXPR
13661 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13662 begin = middle + 1;
13663 else if (TREE_CODE (index) == RANGE_EXPR
13664 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13667 return VEC_index (constructor_elt, elts, middle)->value;
13675 return fold (DECL_INITIAL (t));
13679 } /* switch (code) */
13682 #ifdef ENABLE_FOLD_CHECKING
13685 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13686 static void fold_check_failed (const_tree, const_tree);
13687 void print_fold_checksum (const_tree);
13689 /* When --enable-checking=fold is given, compute a digest of expr
13690 before and after the actual fold call, to verify that fold did not
13691 accidentally change the original expr.  */
13697 struct md5_ctx ctx;
13698 unsigned char checksum_before[16], checksum_after[16];
13701 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13702 md5_init_ctx (&ctx);
13703 fold_checksum_tree (expr, &ctx, ht);
13704 md5_finish_ctx (&ctx, checksum_before);
13707 ret = fold_1 (expr);
13709 md5_init_ctx (&ctx);
13710 fold_checksum_tree (expr, &ctx, ht);
13711 md5_finish_ctx (&ctx, checksum_after);
13714 if (memcmp (checksum_before, checksum_after, 16))
13715 fold_check_failed (expr, ret);
13721 print_fold_checksum (const_tree expr)
13723 struct md5_ctx ctx;
13724 unsigned char checksum[16], cnt;
13727 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13728 md5_init_ctx (&ctx);
13729 fold_checksum_tree (expr, &ctx, ht);
13730 md5_finish_ctx (&ctx, checksum);
13732 for (cnt = 0; cnt < 16; ++cnt)
13733 fprintf (stderr, "%02x", checksum[cnt]);
13734 putc ('\n', stderr);
13738 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13740 internal_error ("fold check: original tree changed by fold");
13744 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13747 enum tree_code code;
13748 union tree_node buf;
13753 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13754 <= sizeof (struct tree_function_decl))
13755 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13758 slot = (void **) htab_find_slot (ht, expr, INSERT);
13761 *slot = CONST_CAST_TREE (expr);
13762 code = TREE_CODE (expr);
13763 if (TREE_CODE_CLASS (code) == tcc_declaration
13764 && DECL_ASSEMBLER_NAME_SET_P (expr))
13766 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13767 memcpy ((char *) &buf, expr, tree_size (expr));
13768 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13769 expr = (tree) &buf;
13771 else if (TREE_CODE_CLASS (code) == tcc_type
13772 && (TYPE_POINTER_TO (expr)
13773 || TYPE_REFERENCE_TO (expr)
13774 || TYPE_CACHED_VALUES_P (expr)
13775 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13776 || TYPE_NEXT_VARIANT (expr)))
13778 /* Allow these fields to be modified. */
13780 memcpy ((char *) &buf, expr, tree_size (expr));
13781 expr = tmp = (tree) &buf;
13782 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13783 TYPE_POINTER_TO (tmp) = NULL;
13784 TYPE_REFERENCE_TO (tmp) = NULL;
13785 TYPE_NEXT_VARIANT (tmp) = NULL;
13786 if (TYPE_CACHED_VALUES_P (tmp))
13788 TYPE_CACHED_VALUES_P (tmp) = 0;
13789 TYPE_CACHED_VALUES (tmp) = NULL;
13792 md5_process_bytes (expr, tree_size (expr), ctx);
13793 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13794 if (TREE_CODE_CLASS (code) != tcc_type
13795 && TREE_CODE_CLASS (code) != tcc_declaration
13796 && code != TREE_LIST
13797 && code != SSA_NAME)
13798 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13799 switch (TREE_CODE_CLASS (code))
13805 md5_process_bytes (TREE_STRING_POINTER (expr),
13806 TREE_STRING_LENGTH (expr), ctx);
13809 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13810 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13813 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13819 case tcc_exceptional:
13823 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13824 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13825 expr = TREE_CHAIN (expr);
13826 goto recursive_label;
13829 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13830 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13836 case tcc_expression:
13837 case tcc_reference:
13838 case tcc_comparison:
13841 case tcc_statement:
13843 len = TREE_OPERAND_LENGTH (expr);
13844 for (i = 0; i < len; ++i)
13845 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13847 case tcc_declaration:
13848 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13849 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13850 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13852 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13853 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13854 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13855 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13856 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13858 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13859 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13861 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13863 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13864 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13865 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13869 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13870 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13871 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13872 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13873 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13874 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13875 if (INTEGRAL_TYPE_P (expr)
13876 || SCALAR_FLOAT_TYPE_P (expr))
13878 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13879 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13881 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13882 if (TREE_CODE (expr) == RECORD_TYPE
13883 || TREE_CODE (expr) == UNION_TYPE
13884 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13885 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13886 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13893 /* Helper function for outputting the checksum of a tree T. When
13894 debugging with gdb, you can "define mynext" to be "next" followed
13895 by "call debug_fold_checksum (op0)", then just trace down till the
13898 DEBUG_FUNCTION void
13899 debug_fold_checksum (const_tree t)
13902 unsigned char checksum[16];
13903 struct md5_ctx ctx;
13904 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13906 md5_init_ctx (&ctx);
13907 fold_checksum_tree (t, &ctx, ht);
13908 md5_finish_ctx (&ctx, checksum);
13911 for (i = 0; i < 16; i++)
13912 fprintf (stderr, "%d ", checksum[i]);
13914 fprintf (stderr, "\n");
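/* Illustrative gdb session using the helper above, following the
   comment that precedes it:

     (gdb) define mynext
     > next
     > call debug_fold_checksum (op0)
     > end

   then step with "mynext" until the printed checksum changes.  */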
13919 /* Fold a unary tree expression with code CODE of type TYPE with an
13920 operand OP0. LOC is the location of the resulting expression.
13921 Return a folded expression if successful. Otherwise, return a tree
13922 expression with code CODE of type TYPE with an operand OP0. */
13925 fold_build1_stat_loc (location_t loc,
13926 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13929 #ifdef ENABLE_FOLD_CHECKING
13930 unsigned char checksum_before[16], checksum_after[16];
13931 struct md5_ctx ctx;
13934 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13935 md5_init_ctx (&ctx);
13936 fold_checksum_tree (op0, &ctx, ht);
13937 md5_finish_ctx (&ctx, checksum_before);
13941 tem = fold_unary_loc (loc, code, type, op0);
13943 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
13945 #ifdef ENABLE_FOLD_CHECKING
13946 md5_init_ctx (&ctx);
13947 fold_checksum_tree (op0, &ctx, ht);
13948 md5_finish_ctx (&ctx, checksum_after);
13951 if (memcmp (checksum_before, checksum_after, 16))
13952 fold_check_failed (op0, tem);
13957 /* Fold a binary tree expression with code CODE of type TYPE with
13958 operands OP0 and OP1. LOC is the location of the resulting
13959 expression. Return a folded expression if successful. Otherwise,
13960 return a tree expression with code CODE of type TYPE with operands
13964 fold_build2_stat_loc (location_t loc,
13965 enum tree_code code, tree type, tree op0, tree op1
13969 #ifdef ENABLE_FOLD_CHECKING
13970 unsigned char checksum_before_op0[16],
13971 checksum_before_op1[16],
13972 checksum_after_op0[16],
13973 checksum_after_op1[16];
13974 struct md5_ctx ctx;
13977 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13978 md5_init_ctx (&ctx);
13979 fold_checksum_tree (op0, &ctx, ht);
13980 md5_finish_ctx (&ctx, checksum_before_op0);
13983 md5_init_ctx (&ctx);
13984 fold_checksum_tree (op1, &ctx, ht);
13985 md5_finish_ctx (&ctx, checksum_before_op1);
13989 tem = fold_binary_loc (loc, code, type, op0, op1);
13991 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13993 #ifdef ENABLE_FOLD_CHECKING
13994 md5_init_ctx (&ctx);
13995 fold_checksum_tree (op0, &ctx, ht);
13996 md5_finish_ctx (&ctx, checksum_after_op0);
13999 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14000 fold_check_failed (op0, tem);
14002 md5_init_ctx (&ctx);
14003 fold_checksum_tree (op1, &ctx, ht);
14004 md5_finish_ctx (&ctx, checksum_after_op1);
14007 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14008 fold_check_failed (op1, tem);
14013 /* Fold a ternary tree expression with code CODE of type TYPE with
14014 operands OP0, OP1, and OP2. Return a folded expression if
14015 successful. Otherwise, return a tree expression with code CODE of
14016 type TYPE with operands OP0, OP1, and OP2. */
14019 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14020 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14023 #ifdef ENABLE_FOLD_CHECKING
14024 unsigned char checksum_before_op0[16],
14025 checksum_before_op1[16],
14026 checksum_before_op2[16],
14027 checksum_after_op0[16],
14028 checksum_after_op1[16],
14029 checksum_after_op2[16];
14030 struct md5_ctx ctx;
14033 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14034 md5_init_ctx (&ctx);
14035 fold_checksum_tree (op0, &ctx, ht);
14036 md5_finish_ctx (&ctx, checksum_before_op0);
14039 md5_init_ctx (&ctx);
14040 fold_checksum_tree (op1, &ctx, ht);
14041 md5_finish_ctx (&ctx, checksum_before_op1);
14044 md5_init_ctx (&ctx);
14045 fold_checksum_tree (op2, &ctx, ht);
14046 md5_finish_ctx (&ctx, checksum_before_op2);
14050 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14051 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14053 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14055 #ifdef ENABLE_FOLD_CHECKING
14056 md5_init_ctx (&ctx);
14057 fold_checksum_tree (op0, &ctx, ht);
14058 md5_finish_ctx (&ctx, checksum_after_op0);
14061 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14062 fold_check_failed (op0, tem);
14064 md5_init_ctx (&ctx);
14065 fold_checksum_tree (op1, &ctx, ht);
14066 md5_finish_ctx (&ctx, checksum_after_op1);
14069 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14070 fold_check_failed (op1, tem);
14072 md5_init_ctx (&ctx);
14073 fold_checksum_tree (op2, &ctx, ht);
14074 md5_finish_ctx (&ctx, checksum_after_op2);
14077 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14078 fold_check_failed (op2, tem);
14083 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14084 arguments in ARGARRAY, and a null static chain.
14085 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14086 of type TYPE from the given operands as constructed by build_call_array. */
14089 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14090 int nargs, tree *argarray)
14093 #ifdef ENABLE_FOLD_CHECKING
14094 unsigned char checksum_before_fn[16],
14095 checksum_before_arglist[16],
14096 checksum_after_fn[16],
14097 checksum_after_arglist[16];
14098 struct md5_ctx ctx;
14102 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14103 md5_init_ctx (&ctx);
14104 fold_checksum_tree (fn, &ctx, ht);
14105 md5_finish_ctx (&ctx, checksum_before_fn);
14108 md5_init_ctx (&ctx);
14109 for (i = 0; i < nargs; i++)
14110 fold_checksum_tree (argarray[i], &ctx, ht);
14111 md5_finish_ctx (&ctx, checksum_before_arglist);
14115 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14117 #ifdef ENABLE_FOLD_CHECKING
14118 md5_init_ctx (&ctx);
14119 fold_checksum_tree (fn, &ctx, ht);
14120 md5_finish_ctx (&ctx, checksum_after_fn);
14123 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14124 fold_check_failed (fn, tem);
14126 md5_init_ctx (&ctx);
14127 for (i = 0; i < nargs; i++)
14128 fold_checksum_tree (argarray[i], &ctx, ht);
14129 md5_finish_ctx (&ctx, checksum_after_arglist);
14132 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14133 fold_check_failed (NULL_TREE, tem);
14138 /* Perform constant folding and related simplification of initializer
14139 expression EXPR. These behave identically to "fold_buildN" but ignore
14140 potential run-time traps and exceptions that fold must preserve. */
14142 #define START_FOLD_INIT \
14143 int saved_signaling_nans = flag_signaling_nans;\
14144 int saved_trapping_math = flag_trapping_math;\
14145 int saved_rounding_math = flag_rounding_math;\
14146 int saved_trapv = flag_trapv;\
14147 int saved_folding_initializer = folding_initializer;\
14148 flag_signaling_nans = 0;\
14149 flag_trapping_math = 0;\
14150 flag_rounding_math = 0;\
14152 folding_initializer = 1;
14154 #define END_FOLD_INIT \
14155 flag_signaling_nans = saved_signaling_nans;\
14156 flag_trapping_math = saved_trapping_math;\
14157 flag_rounding_math = saved_rounding_math;\
14158 flag_trapv = saved_trapv;\
14159 folding_initializer = saved_folding_initializer;
14162 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14163 tree type, tree op)
14168 result = fold_build1_loc (loc, code, type, op);
14175 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14176 tree type, tree op0, tree op1)
14181 result = fold_build2_loc (loc, code, type, op0, op1);
14188 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14189 tree type, tree op0, tree op1, tree op2)
14194 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14201 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14202 int nargs, tree *argarray)
14207 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14213 #undef START_FOLD_INIT
14214 #undef END_FOLD_INIT
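/* For illustration: an inexact operation such as 1.0/3.0 is normally
   left unfolded when -frounding-math is in effect, because its value
   depends on the dynamic rounding mode.  By clearing the flags above,
   the *_initializer_loc variants let such expressions still fold to
   constants inside static initializers, where no run-time behavior
   can be observed.  */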
14216 /* Determine if first argument is a multiple of second argument. Return 0 if
14217 it is not, or if we cannot easily determine it to be.
14219 An example of the sort of thing we care about (at this point; this routine
14220 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14221 fold cases do now) is discovering that
14223 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14229 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14231 This code also handles discovering that
14233 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14235 is a multiple of 8 so we don't have to worry about dealing with a
14236 possible remainder.
14238 Note that we *look* inside a SAVE_EXPR only to determine how it was
14239 calculated; it is not safe for fold to do much of anything else with the
14240 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14241 at run time. For example, the latter example above *cannot* be implemented
14242 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14243 evaluation time of the original SAVE_EXPR is not necessarily the same at
14244 the time the new expression is evaluated. The only optimization of this
14245 sort that would be valid is changing
14247 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14251 SAVE_EXPR (I) * SAVE_EXPR (J)
14253 (where the same SAVE_EXPR (J) is used in the original and the
14254 transformed version). */
14257 multiple_of_p (tree type, const_tree top, const_tree bottom)
14259 if (operand_equal_p (top, bottom, 0))
14262 if (TREE_CODE (type) != INTEGER_TYPE)
14265 switch (TREE_CODE (top))
14268 /* Bitwise and provides a power of two multiple. If the mask is
14269 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14270 if (!integer_pow2p (bottom))
14275 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14276 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14280 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14281 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14284 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14288 op1 = TREE_OPERAND (top, 1);
14289 /* const_binop may not detect overflow correctly,
14290 so check for it explicitly here. */
14291 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14292 > TREE_INT_CST_LOW (op1)
14293 && TREE_INT_CST_HIGH (op1) == 0
14294 && 0 != (t1 = fold_convert (type,
14295 const_binop (LSHIFT_EXPR,
14298 && !TREE_OVERFLOW (t1))
14299 return multiple_of_p (type, t1, bottom);
14304 /* Can't handle conversions from non-integral or wider integral type. */
14305 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14306 || (TYPE_PRECISION (type)
14307 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14310 /* .. fall through ... */
14313 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14316 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14317 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14320 if (TREE_CODE (bottom) != INTEGER_CST
14321 || integer_zerop (bottom)
14322 || (TYPE_UNSIGNED (type)
14323 && (tree_int_cst_sgn (top) < 0
14324 || tree_int_cst_sgn (bottom) < 0)))
14326 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
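/* Illustrative results of the function above, with TYPE == sizetype:
   TOP == J * 8 and BOTTOM == 4 yields 1, because the constant factor
   8 is itself a multiple of 4; TOP == I + 8 and BOTTOM == 8 yields 0,
   because a PLUS_EXPR requires both addends to be multiples and
   nothing is known about I.  */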
14334 /* Return true if CODE or TYPE is known to be non-negative. */
14337 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14339 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14340 && truth_value_p (code))
14341 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14342 have a signed:1 type (where the values are -1 and 0).  */
14347 /* Return true if (CODE OP0) is known to be non-negative. If the return
14348 value is based on the assumption that signed overflow is undefined,
14349 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14350 *STRICT_OVERFLOW_P. */
14353 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14354 bool *strict_overflow_p)
14356 if (TYPE_UNSIGNED (type))
14362 /* We can't return 1 if flag_wrapv is set because
14363 ABS_EXPR<INT_MIN> = INT_MIN. */
14364 if (!INTEGRAL_TYPE_P (type))
14366 if (TYPE_OVERFLOW_UNDEFINED (type))
14368 *strict_overflow_p = true;
14373 case NON_LVALUE_EXPR:
14375 case FIX_TRUNC_EXPR:
14376 return tree_expr_nonnegative_warnv_p (op0,
14377 strict_overflow_p);
14381 tree inner_type = TREE_TYPE (op0);
14382 tree outer_type = type;
14384 if (TREE_CODE (outer_type) == REAL_TYPE)
14386 if (TREE_CODE (inner_type) == REAL_TYPE)
14387 return tree_expr_nonnegative_warnv_p (op0,
14388 strict_overflow_p);
14389 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14391 if (TYPE_UNSIGNED (inner_type))
14393 return tree_expr_nonnegative_warnv_p (op0,
14394 strict_overflow_p);
14397 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14399 if (TREE_CODE (inner_type) == REAL_TYPE)
14400 return tree_expr_nonnegative_warnv_p (op0,
14401 strict_overflow_p);
14402 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14403 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14404 && TYPE_UNSIGNED (inner_type);
14410 return tree_simple_nonnegative_warnv_p (code, type);
14413 /* We don't know sign of `t', so be conservative and return false. */
14417 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14418 value is based on the assumption that signed overflow is undefined,
14419 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14420 *STRICT_OVERFLOW_P. */
14423 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14424 tree op1, bool *strict_overflow_p)
14426 if (TYPE_UNSIGNED (type))
14431 case POINTER_PLUS_EXPR:
14433 if (FLOAT_TYPE_P (type))
14434 return (tree_expr_nonnegative_warnv_p (op0,
14436 && tree_expr_nonnegative_warnv_p (op1,
14437 strict_overflow_p));
14439 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14440 both unsigned and at least 2 bits shorter than the result. */
14441 if (TREE_CODE (type) == INTEGER_TYPE
14442 && TREE_CODE (op0) == NOP_EXPR
14443 && TREE_CODE (op1) == NOP_EXPR)
14445 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14446 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14447 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14448 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14450 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14451 TYPE_PRECISION (inner2)) + 1;
14452 return prec < TYPE_PRECISION (type);
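/* Worked instance of the check above: adding two zero-extended
   unsigned chars in a 32-bit int yields at most 255 + 255 == 510,
   which needs only 9 bits, so prec == 8 + 1 == 9 < 32 and the sum
   is known to be non-negative.  */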
14458 if (FLOAT_TYPE_P (type))
14460 /* x * x for floating point x is always non-negative. */
14461 if (operand_equal_p (op0, op1, 0))
14463 return (tree_expr_nonnegative_warnv_p (op0,
14465 && tree_expr_nonnegative_warnv_p (op1,
14466 strict_overflow_p));
14469 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14470 both unsigned and the sum of their precisions is less than that of the result.  */
14471 if (TREE_CODE (type) == INTEGER_TYPE
14472 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14473 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14475 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14476 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14478 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14479 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14482 bool unsigned0 = TYPE_UNSIGNED (inner0);
14483 bool unsigned1 = TYPE_UNSIGNED (inner1);
14485 if (TREE_CODE (op0) == INTEGER_CST)
14486 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14488 if (TREE_CODE (op1) == INTEGER_CST)
14489 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14491 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14492 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14494 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14495 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14496 : TYPE_PRECISION (inner0);
14498 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14499 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14500 : TYPE_PRECISION (inner1);
14502 return precision0 + precision1 < TYPE_PRECISION (type);
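/* Worked instance: multiplying two zero-extended unsigned chars in a
   32-bit int yields at most 255 * 255 == 65025, which fits in
   8 + 8 == 16 bits, so precision0 + precision1 == 16 < 32 and the
   product is known to be non-negative.  */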
14509 return (tree_expr_nonnegative_warnv_p (op0,
14511 || tree_expr_nonnegative_warnv_p (op1,
14512 strict_overflow_p));
14518 case TRUNC_DIV_EXPR:
14519 case CEIL_DIV_EXPR:
14520 case FLOOR_DIV_EXPR:
14521 case ROUND_DIV_EXPR:
14522 return (tree_expr_nonnegative_warnv_p (op0,
14524 && tree_expr_nonnegative_warnv_p (op1,
14525 strict_overflow_p));
14527 case TRUNC_MOD_EXPR:
14528 case CEIL_MOD_EXPR:
14529 case FLOOR_MOD_EXPR:
14530 case ROUND_MOD_EXPR:
14531 return tree_expr_nonnegative_warnv_p (op0,
14532 strict_overflow_p);
14534 return tree_simple_nonnegative_warnv_p (code, type);
14537 /* We don't know sign of `t', so be conservative and return false. */
14541 /* Return true if T is known to be non-negative. If the return
14542 value is based on the assumption that signed overflow is undefined,
14543 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14544 *STRICT_OVERFLOW_P. */
14547 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14549 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14552 switch (TREE_CODE (t))
14555 return tree_int_cst_sgn (t) >= 0;
14558 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14561 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14564 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14566 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14567 strict_overflow_p));
14569 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14572 /* We don't know sign of `t', so be conservative and return false. */
14576 /* Return true if T is known to be non-negative. If the return
14577 value is based on the assumption that signed overflow is undefined,
14578 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14579 *STRICT_OVERFLOW_P. */
14582 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14583 tree arg0, tree arg1, bool *strict_overflow_p)
14585 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14586 switch (DECL_FUNCTION_CODE (fndecl))
14588 CASE_FLT_FN (BUILT_IN_ACOS):
14589 CASE_FLT_FN (BUILT_IN_ACOSH):
14590 CASE_FLT_FN (BUILT_IN_CABS):
14591 CASE_FLT_FN (BUILT_IN_COSH):
14592 CASE_FLT_FN (BUILT_IN_ERFC):
14593 CASE_FLT_FN (BUILT_IN_EXP):
14594 CASE_FLT_FN (BUILT_IN_EXP10):
14595 CASE_FLT_FN (BUILT_IN_EXP2):
14596 CASE_FLT_FN (BUILT_IN_FABS):
14597 CASE_FLT_FN (BUILT_IN_FDIM):
14598 CASE_FLT_FN (BUILT_IN_HYPOT):
14599 CASE_FLT_FN (BUILT_IN_POW10):
14600 CASE_INT_FN (BUILT_IN_FFS):
14601 CASE_INT_FN (BUILT_IN_PARITY):
14602 CASE_INT_FN (BUILT_IN_POPCOUNT):
14603 case BUILT_IN_BSWAP32:
14604 case BUILT_IN_BSWAP64:
14608 CASE_FLT_FN (BUILT_IN_SQRT):
14609 /* sqrt(-0.0) is -0.0. */
14610 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14612 return tree_expr_nonnegative_warnv_p (arg0,
14613 strict_overflow_p);
14615 CASE_FLT_FN (BUILT_IN_ASINH):
14616 CASE_FLT_FN (BUILT_IN_ATAN):
14617 CASE_FLT_FN (BUILT_IN_ATANH):
14618 CASE_FLT_FN (BUILT_IN_CBRT):
14619 CASE_FLT_FN (BUILT_IN_CEIL):
14620 CASE_FLT_FN (BUILT_IN_ERF):
14621 CASE_FLT_FN (BUILT_IN_EXPM1):
14622 CASE_FLT_FN (BUILT_IN_FLOOR):
14623 CASE_FLT_FN (BUILT_IN_FMOD):
14624 CASE_FLT_FN (BUILT_IN_FREXP):
14625 CASE_FLT_FN (BUILT_IN_LCEIL):
14626 CASE_FLT_FN (BUILT_IN_LDEXP):
14627 CASE_FLT_FN (BUILT_IN_LFLOOR):
14628 CASE_FLT_FN (BUILT_IN_LLCEIL):
14629 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14630 CASE_FLT_FN (BUILT_IN_LLRINT):
14631 CASE_FLT_FN (BUILT_IN_LLROUND):
14632 CASE_FLT_FN (BUILT_IN_LRINT):
14633 CASE_FLT_FN (BUILT_IN_LROUND):
14634 CASE_FLT_FN (BUILT_IN_MODF):
14635 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14636 CASE_FLT_FN (BUILT_IN_RINT):
14637 CASE_FLT_FN (BUILT_IN_ROUND):
14638 CASE_FLT_FN (BUILT_IN_SCALB):
14639 CASE_FLT_FN (BUILT_IN_SCALBLN):
14640 CASE_FLT_FN (BUILT_IN_SCALBN):
14641 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14642 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14643 CASE_FLT_FN (BUILT_IN_SINH):
14644 CASE_FLT_FN (BUILT_IN_TANH):
14645 CASE_FLT_FN (BUILT_IN_TRUNC):
14646 /* True if the 1st argument is nonnegative. */
14647 return tree_expr_nonnegative_warnv_p (arg0,
14648 strict_overflow_p);
14650 CASE_FLT_FN (BUILT_IN_FMAX):
14651 /* True if the 1st OR 2nd arguments are nonnegative. */
14652 return (tree_expr_nonnegative_warnv_p (arg0,
14654 || (tree_expr_nonnegative_warnv_p (arg1,
14655 strict_overflow_p)));
14657 CASE_FLT_FN (BUILT_IN_FMIN):
14658 /* True if the 1st AND 2nd arguments are nonnegative. */
14659 return (tree_expr_nonnegative_warnv_p (arg0,
14661 && (tree_expr_nonnegative_warnv_p (arg1,
14662 strict_overflow_p)));
14664 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14665 /* True if the 2nd argument is nonnegative. */
14666 return tree_expr_nonnegative_warnv_p (arg1,
14667 strict_overflow_p);
14669 CASE_FLT_FN (BUILT_IN_POWI):
14670 /* True if the 1st argument is nonnegative or the second
14671 argument is an even integer. */
14672 if (TREE_CODE (arg1) == INTEGER_CST
14673 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14675 return tree_expr_nonnegative_warnv_p (arg0,
14676 strict_overflow_p);
14678 CASE_FLT_FN (BUILT_IN_POW):
14679 /* True if the 1st argument is nonnegative or the second
14680 argument is an even integer valued real. */
14681 if (TREE_CODE (arg1) == REAL_CST)
14686 c = TREE_REAL_CST (arg1);
14687 n = real_to_integer (&c);
14690 REAL_VALUE_TYPE cint;
14691 real_from_integer (&cint, VOIDmode, n,
14692 n < 0 ? -1 : 0, 0);
14693 if (real_identical (&c, &cint))
14697 return tree_expr_nonnegative_warnv_p (arg0,
14698 strict_overflow_p);
14703 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14707 /* Return true if T is known to be non-negative. If the return
14708 value is based on the assumption that signed overflow is undefined,
14709 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14710 *STRICT_OVERFLOW_P. */
14713 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14715 enum tree_code code = TREE_CODE (t);
14716 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14723 tree temp = TARGET_EXPR_SLOT (t);
14724 t = TARGET_EXPR_INITIAL (t);
14726 /* If the initializer is non-void, then it's a normal expression
14727 that will be assigned to the slot. */
14728 if (!VOID_TYPE_P (t))
14729 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14731 /* Otherwise, the initializer sets the slot in some way. One common
14732 way is an assignment statement at the end of the initializer. */
14735 if (TREE_CODE (t) == BIND_EXPR)
14736 t = expr_last (BIND_EXPR_BODY (t));
14737 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14738 || TREE_CODE (t) == TRY_CATCH_EXPR)
14739 t = expr_last (TREE_OPERAND (t, 0));
14740 else if (TREE_CODE (t) == STATEMENT_LIST)
14745 if (TREE_CODE (t) == MODIFY_EXPR
14746 && TREE_OPERAND (t, 0) == temp)
14747 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14748 strict_overflow_p);
14755 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14756 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14758 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14759 get_callee_fndecl (t),
14762 strict_overflow_p);
14764 case COMPOUND_EXPR:
14766 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14767 strict_overflow_p);
14769 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14770 strict_overflow_p);
14772 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14773 strict_overflow_p);
14776 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14780 /* We don't know sign of `t', so be conservative and return false. */
14784 /* Return true if T is known to be non-negative. If the return
14785 value is based on the assumption that signed overflow is undefined,
14786 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14787 *STRICT_OVERFLOW_P. */
14790 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14792 enum tree_code code;
14793 if (t == error_mark_node)
14796 code = TREE_CODE (t);
14797 switch (TREE_CODE_CLASS (code))
14800 case tcc_comparison:
14801 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14803 TREE_OPERAND (t, 0),
14804 TREE_OPERAND (t, 1),
14805 strict_overflow_p);
14808 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14810 TREE_OPERAND (t, 0),
14811 strict_overflow_p);
14814 case tcc_declaration:
14815 case tcc_reference:
14816 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14824 case TRUTH_AND_EXPR:
14825 case TRUTH_OR_EXPR:
14826 case TRUTH_XOR_EXPR:
14827 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14829 TREE_OPERAND (t, 0),
14830 TREE_OPERAND (t, 1),
14831 strict_overflow_p);
14832 case TRUTH_NOT_EXPR:
14833 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14835 TREE_OPERAND (t, 0),
14836 strict_overflow_p);
14843 case WITH_SIZE_EXPR:
14845 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14848 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14852 /* Return true if `t' is known to be non-negative. Handle warnings
14853 about undefined signed overflow. */
14856 tree_expr_nonnegative_p (tree t)
14858 bool ret, strict_overflow_p;
14860 strict_overflow_p = false;
14861 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14862 if (strict_overflow_p)
14863 fold_overflow_warning (("assuming signed overflow does not occur when "
14864 "determining that expression is always "
14866 WARN_STRICT_OVERFLOW_MISC);
14871 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14872 For floating point we further ensure that T is not denormal.
14873 Similar logic is present in nonzero_address in rtlanal.h.
14875 If the return value is based on the assumption that signed overflow
14876 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14877 change *STRICT_OVERFLOW_P. */
14880 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14881 bool *strict_overflow_p)
14886 return tree_expr_nonzero_warnv_p (op0,
14887 strict_overflow_p);
14891 tree inner_type = TREE_TYPE (op0);
14892 tree outer_type = type;
14894 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14895 && tree_expr_nonzero_warnv_p (op0,
14896 strict_overflow_p));
14900 case NON_LVALUE_EXPR:
14901 return tree_expr_nonzero_warnv_p (op0,
14902 strict_overflow_p);
14911 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14912 For floating point we further ensure that T is not denormal.
14913 Similar logic is present in nonzero_address in rtlanal.h.
14915 If the return value is based on the assumption that signed overflow
14916 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14917 change *STRICT_OVERFLOW_P. */
14920 tree_binary_nonzero_warnv_p (enum tree_code code,
14923 tree op1, bool *strict_overflow_p)
14925 bool sub_strict_overflow_p;
14928 case POINTER_PLUS_EXPR:
14930 if (TYPE_OVERFLOW_UNDEFINED (type))
14932 /* In the presence of negative values it is hard
14933 to say anything.  */
14934 sub_strict_overflow_p = false;
14935 if (!tree_expr_nonnegative_warnv_p (op0,
14936 &sub_strict_overflow_p)
14937 || !tree_expr_nonnegative_warnv_p (op1,
14938 &sub_strict_overflow_p))
14940 /* One of the operands must be positive and the other non-negative.  */
14941 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14942 overflows, on a twos-complement machine the sum of two
14943 nonnegative numbers can never be zero. */
14944 return (tree_expr_nonzero_warnv_p (op0,
14946 || tree_expr_nonzero_warnv_p (op1,
14947 strict_overflow_p));
14952 if (TYPE_OVERFLOW_UNDEFINED (type))
14954 if (tree_expr_nonzero_warnv_p (op0,
14956 && tree_expr_nonzero_warnv_p (op1,
14957 strict_overflow_p))
14959 *strict_overflow_p = true;
14966 sub_strict_overflow_p = false;
14967 if (tree_expr_nonzero_warnv_p (op0,
14968 &sub_strict_overflow_p)
14969 && tree_expr_nonzero_warnv_p (op1,
14970 &sub_strict_overflow_p))
14972 if (sub_strict_overflow_p)
14973 *strict_overflow_p = true;
14978 sub_strict_overflow_p = false;
14979 if (tree_expr_nonzero_warnv_p (op0,
14980 &sub_strict_overflow_p))
14982 if (sub_strict_overflow_p)
14983 *strict_overflow_p = true;
14985 /* When both operands are nonzero, then MAX must be too. */
14986 if (tree_expr_nonzero_warnv_p (op1,
14987 strict_overflow_p))
14990 /* MAX where operand 0 is positive is positive. */
14991 return tree_expr_nonnegative_warnv_p (op0,
14992 strict_overflow_p);
14994 /* MAX where operand 1 is positive is positive. */
14995 else if (tree_expr_nonzero_warnv_p (op1,
14996 &sub_strict_overflow_p)
14997 && tree_expr_nonnegative_warnv_p (op1,
14998 &sub_strict_overflow_p))
15000 if (sub_strict_overflow_p)
15001 *strict_overflow_p = true;
15007 return (tree_expr_nonzero_warnv_p (op1,
15009 || tree_expr_nonzero_warnv_p (op0,
15010 strict_overflow_p));
15019 /* Return true when T is an address and is known to be nonzero.
15020 For floating point we further ensure that T is not denormal.
15021 Similar logic is present in nonzero_address in rtlanal.h.
15023 If the return value is based on the assumption that signed overflow
15024 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15025 change *STRICT_OVERFLOW_P. */
15028 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15030 bool sub_strict_overflow_p;
15031 switch (TREE_CODE (t))
15034 return !integer_zerop (t);
15038 tree base = TREE_OPERAND (t, 0);
15039 if (!DECL_P (base))
15040 base = get_base_address (base);
15045 /* Weak declarations may link to NULL. Other things may also be NULL
15046 so protect with -fdelete-null-pointer-checks; but not variables
15047 allocated on the stack. */
15049 && (flag_delete_null_pointer_checks
15050 || (DECL_CONTEXT (base)
15051 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15052 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15053 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15055 /* Constants are never weak. */
15056 if (CONSTANT_CLASS_P (base))
15063 sub_strict_overflow_p = false;
15064 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15065 &sub_strict_overflow_p)
15066 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15067 &sub_strict_overflow_p))
15069 if (sub_strict_overflow_p)
15070 *strict_overflow_p = true;
15081 /* Return true when T is an address and is known to be nonzero.
15082 For floating point we further ensure that T is not denormal.
15083 Similar logic is present in nonzero_address in rtlanal.h.
15085 If the return value is based on the assumption that signed overflow
15086 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15087 change *STRICT_OVERFLOW_P. */
15090 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15092 tree type = TREE_TYPE (t);
15093 enum tree_code code;
15095 /* Doing something useful for floating point would need more work. */
15096 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15099 code = TREE_CODE (t);
15100 switch (TREE_CODE_CLASS (code))
15103 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15104 strict_overflow_p);
15106 case tcc_comparison:
15107 return tree_binary_nonzero_warnv_p (code, type,
15108 TREE_OPERAND (t, 0),
15109 TREE_OPERAND (t, 1),
15110 strict_overflow_p);
15112 case tcc_declaration:
15113 case tcc_reference:
15114 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15122 case TRUTH_NOT_EXPR:
15123 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15124 strict_overflow_p);
15126 case TRUTH_AND_EXPR:
15127 case TRUTH_OR_EXPR:
15128 case TRUTH_XOR_EXPR:
15129 return tree_binary_nonzero_warnv_p (code, type,
15130 TREE_OPERAND (t, 0),
15131 TREE_OPERAND (t, 1),
15132 strict_overflow_p);
15139 case WITH_SIZE_EXPR:
15141 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15143 case COMPOUND_EXPR:
15146 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15147 strict_overflow_p);
15150 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15151 strict_overflow_p);
15154 return alloca_call_p (t);
15162 /* Return true when T is an address and is known to be nonzero.
15163 Handle warnings about undefined signed overflow. */
15166 tree_expr_nonzero_p (tree t)
15168 bool ret, strict_overflow_p;
15170 strict_overflow_p = false;
15171 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15172 if (strict_overflow_p)
15173 fold_overflow_warning (("assuming signed overflow does not occur when "
15174 "determining that expression is always "
15176 WARN_STRICT_OVERFLOW_MISC);
15180 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15181 attempt to fold the expression to a constant without modifying TYPE,
15184 If the expression could be simplified to a constant, then return
15185 the constant. If the expression would not be simplified to a
15186 constant, then return NULL_TREE. */
15189 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15191 tree tem = fold_binary (code, type, op0, op1);
15192 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15195 /* Given the components of a unary expression CODE, TYPE and OP0,
15196 attempt to fold the expression to a constant without modifying
15199 If the expression could be simplified to a constant, then return
15200 the constant. If the expression would not be simplified to a
15201 constant, then return NULL_TREE. */
15204 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15206 tree tem = fold_unary (code, type, op0);
15207 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15210 /* If EXP represents referencing an element in a constant string
15211 (either via pointer arithmetic or array indexing), return the
15212 tree representing the value accessed, otherwise return NULL. */
15215 fold_read_from_constant_string (tree exp)
15217 if ((TREE_CODE (exp) == INDIRECT_REF
15218 || TREE_CODE (exp) == ARRAY_REF)
15219 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15221 tree exp1 = TREE_OPERAND (exp, 0);
15224 location_t loc = EXPR_LOCATION (exp);
15226 if (TREE_CODE (exp) == INDIRECT_REF)
15227 string = string_constant (exp1, &index);
15230 tree low_bound = array_ref_low_bound (exp);
15231 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15233 /* Optimize the special-case of a zero lower bound.
15235 We convert the low_bound to sizetype to avoid some problems
15236 with constant folding. (E.g. suppose the lower bound is 1,
15237 and its mode is QI.  Without the conversion, (ARRAY
15238 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15239 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15240 if (! integer_zerop (low_bound))
15241 index = size_diffop_loc (loc, index,
15242 fold_convert_loc (loc, sizetype, low_bound));
15248 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15249 && TREE_CODE (string) == STRING_CST
15250 && TREE_CODE (index) == INTEGER_CST
15251 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15252 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15254 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15255 return build_int_cst_type (TREE_TYPE (exp),
15256 (TREE_STRING_POINTER (string)
15257 [TREE_INT_CST_LOW (index)]));
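/* Illustrative fold performed by the function above: for "abc"[1],
   an ARRAY_REF into a STRING_CST, the element read is constant, so
   the reference folds to the character value 'b'.  */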
15262 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15263 an integer constant, real, or fixed-point constant.
15265 TYPE is the type of the result. */
15268 fold_negate_const (tree arg0, tree type)
15270 tree t = NULL_TREE;
15272 switch (TREE_CODE (arg0))
15276 double_int val = tree_to_double_int (arg0);
15277 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15279 t = force_fit_type_double (type, val, 1,
15280 (overflow | TREE_OVERFLOW (arg0))
15281 && !TYPE_UNSIGNED (type));
15286 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15291 FIXED_VALUE_TYPE f;
15292 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15293 &(TREE_FIXED_CST (arg0)), NULL,
15294 TYPE_SATURATING (type));
15295 t = build_fixed (type, f);
15296 /* Propagate overflow flags. */
15297 if (overflow_p | TREE_OVERFLOW (arg0))
15298 TREE_OVERFLOW (t) = 1;
15303 gcc_unreachable ();
15309 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15310 an integer constant or real constant.
15312 TYPE is the type of the result. */
15315 fold_abs_const (tree arg0, tree type)
15317 tree t = NULL_TREE;
15319 switch (TREE_CODE (arg0))
15323 double_int val = tree_to_double_int (arg0);
15325 /* If the value is unsigned or non-negative, then the absolute value
15326 is the same as the ordinary value. */
15327 if (TYPE_UNSIGNED (type)
15328 || !double_int_negative_p (val))
15331 /* If the value is negative, then the absolute value is
15337 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15338 t = force_fit_type_double (type, val, -1,
15339 overflow | TREE_OVERFLOW (arg0));
15345 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15346 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15352 gcc_unreachable ();
15358 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15359 constant. TYPE is the type of the result. */
15362 fold_not_const (const_tree arg0, tree type)
15366 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15368 val = double_int_not (tree_to_double_int (arg0));
15369 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15372 /* Given CODE, a relational operator, the target type, TYPE and two
15373 constant operands OP0 and OP1, return the result of the
15374 relational operation. If the result is not a compile time
15375 constant, then return NULL_TREE. */
15378 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15380 int result, invert;
15382 /* From here on, the only cases we handle are when the result is
15383 known to be a constant. */
15385 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15387 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15388 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15390 /* Handle the cases where either operand is a NaN. */
15391 if (real_isnan (c0) || real_isnan (c1))
15401 case UNORDERED_EXPR:
15415 if (flag_trapping_math)
15421 gcc_unreachable ();
15424 return constant_boolean_node (result, type);
15427 return constant_boolean_node (real_compare (code, c0, c1), type);
15430 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15432 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15433 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15434 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15437 /* Handle equality/inequality of complex constants. */
15438 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15440 tree rcond = fold_relational_const (code, type,
15441 TREE_REALPART (op0),
15442 TREE_REALPART (op1));
15443 tree icond = fold_relational_const (code, type,
15444 TREE_IMAGPART (op0),
15445 TREE_IMAGPART (op1));
15446 if (code == EQ_EXPR)
15447 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15448 else if (code == NE_EXPR)
15449 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15454 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15456 To compute GT, swap the arguments and do LT.
15457 To compute GE, do LT and invert the result.
15458 To compute LE, swap the arguments, do LT and invert the result.
15459 To compute NE, do EQ and invert the result.
15461 Therefore, the code below must handle only EQ and LT. */
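/* For example, 2 GT 1 is computed as 1 LT 2, and 2 GE 1 as the
   inverse of 2 LT 1.  */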
15463 if (code == LE_EXPR || code == GT_EXPR)
15468 code = swap_tree_comparison (code);
15471 /* Note that it is safe to invert for real values here because we
15472 have already handled the one case in which it matters.  */
15475 if (code == NE_EXPR || code == GE_EXPR)
15478 code = invert_tree_comparison (code, false);
15481 /* Compute a result for LT or EQ if args permit;
15482 Otherwise return T. */
15483 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15485 if (code == EQ_EXPR)
15486 result = tree_int_cst_equal (op0, op1);
15487 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15488 result = INT_CST_LT_UNSIGNED (op0, op1);
15490 result = INT_CST_LT (op0, op1);
15497 return constant_boolean_node (result, type);
15500 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15501 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15505 fold_build_cleanup_point_expr (tree type, tree expr)
15507 /* If the expression does not have side effects then we don't have to wrap
15508 it with a cleanup point expression. */
15509 if (!TREE_SIDE_EFFECTS (expr))
15512 /* If the expression is a return, check to see if the expression inside the
15513 return has no side effects or the right hand side of the modify expression
15514 inside the return.  If either has no side effects, we don't need to
15515 wrap the expression in a cleanup point expression. Note we don't check the
15516 left hand side of the modify because it should always be a return decl. */
15517 if (TREE_CODE (expr) == RETURN_EXPR)
15519 tree op = TREE_OPERAND (expr, 0);
15520 if (!op || !TREE_SIDE_EFFECTS (op))
15522 op = TREE_OPERAND (op, 1);
15523 if (!TREE_SIDE_EFFECTS (op))
15527 return build1 (CLEANUP_POINT_EXPR, type, expr);
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_low_cst (op01, 0);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}

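/* Illustrative source-level view of the transformations above (added for
   exposition; not part of the original source).  The declarations are
   hypothetical and only show the intended equivalences:

     *(double *) &arr         becomes  arr[0]         (ARRAY_REF)
     *(double *) &cplx        becomes  __real__ cplx  (REALPART_EXPR)
     *((double *) &cplx + 1)  becomes  __imag__ cplx  (IMAGPART_EXPR)
     *(float *) &vec          becomes  BIT_FIELD_REF <vec, 32, 0>

   where arr is a double array, cplx a _Complex double and vec a
   floating-point vector.  */
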
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

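/* Illustrative example (added for exposition; not part of the original
   source).  The two wrappers above differ only in their fallback:
   build_fold_indirect_ref_loc builds a fresh INDIRECT_REF when nothing
   simplifies, whereas fold_indirect_ref_loc returns the INDIRECT_REF it
   was handed.  PTR below is a hypothetical tree of pointer type.  */
#if 0
  tree deref = build_fold_indirect_ref_loc (loc, ptr);
#endif
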
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}

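/* Illustrative example (added for exposition; not part of the original
   source).  For a hypothetical ignored expression "(x + f ()) != 0", the
   comparison against 0 and the addition of x are stripped because they
   have no side effects, leaving just the call f ().  */
#if 0
  tree kept = fold_ignored_result (expr);  /* EXPR is hypothetical.  */
#endif
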
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          double_int val = tree_to_double_int (value);
          bool overflow_p;

          if ((val.low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val.low &= ~(divisor - 1);
          val.low += divisor;
          if (val.low == 0)
            {
              val.high++;
              if (val.high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), val,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

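/* Illustrative example (added for exposition; not part of the original
   source).  Rounding up to a power-of-two divisor uses the bit trick
   above: 37 rounded up to a multiple of 8 gives 40, and for a
   non-constant VALUE the same result comes from (VALUE + 7) & -8.  A
   divisor such as 12 instead goes through CEIL_DIV_EXPR and MULT_EXPR,
   so 37 becomes 48.  LOC below is a hypothetical location.  */
#if 0
  tree a = round_up_loc (loc, size_int (37), 8);   /* folds to 40 */
  tree b = round_up_loc (loc, size_int (37), 12);  /* folds to 48 */
#endif
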
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

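/* Illustrative example (added for exposition; not part of the original
   source).  For a hypothetical address &s.a[i], the core is &s, *PBITPOS
   receives the constant bit offset of the member a, and *POFFSET the
   variable part contributed by the index i; a plain pointer is returned
   unchanged with a zero offset.  ADDR below is hypothetical.  */
#if 0
  HOST_WIDE_INT bitpos;
  tree offset;
  tree core = split_address_to_core_and_offset (addr, &bitpos, &offset);
#endif
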
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}

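/* Illustrative example (added for exposition; not part of the original
   source).  For a hypothetical "int a[10]", the addresses &a[3] and
   &a[1] share the core &a, so their difference folds to the constant 8
   (two 4-byte elements on a typical target).  E1 and E2 below are
   hypothetical ADDR_EXPRs.  */
#if 0
  HOST_WIDE_INT delta;
  if (ptr_difference_const (e1, e2, &delta))
    {
      /* delta == 8 for the example above.  */
    }
#endif
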
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);