/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
47 #include "coretypes.h"
56 #include "diagnostic-core.h"
60 #include "langhooks.h"
63 #include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
67 int folding_initializer = 0;
69 /* The following constants represent a bit based encoding of GCC's
70 comparison operators. This encoding simplifies transformations
71 on relational comparison operators, such as AND and OR. */
72 enum comparison_code {
91 static bool negate_mathfn_p (enum built_in_function);
92 static bool negate_expr_p (tree);
93 static tree negate_expr (tree);
94 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
95 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
96 static tree const_binop (enum tree_code, tree, tree);
97 static enum comparison_code comparison_to_compcode (enum tree_code);
98 static enum tree_code compcode_to_comparison (enum comparison_code);
99 static int operand_equal_for_comparison_p (tree, tree, tree);
100 static int twoval_comparison_p (tree, tree *, tree *, int *);
101 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
102 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
103 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
104 static tree make_bit_field_ref (location_t, tree, tree,
105 HOST_WIDE_INT, HOST_WIDE_INT, int);
106 static tree optimize_bit_field_compare (location_t, enum tree_code,
108 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
110 enum machine_mode *, int *, int *,
112 static int all_ones_mask_p (const_tree, int);
113 static tree sign_bit_p (tree, const_tree);
114 static int simple_operand_p (const_tree);
115 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
116 static tree range_predecessor (tree);
117 static tree range_successor (tree);
118 extern tree make_range (tree, int *, tree *, tree *, bool *);
119 extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
121 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
122 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
123 static tree unextend (tree, int, int, tree);
124 static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
125 static tree optimize_minmax_comparison (location_t, enum tree_code,
127 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
128 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
129 static tree fold_binary_op_with_conditional_arg (location_t,
130 enum tree_code, tree,
133 static tree fold_mathfn_compare (location_t,
134 enum built_in_function, enum tree_code,
136 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
137 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
138 static bool reorder_operands_p (const_tree, const_tree);
139 static tree fold_negate_const (tree, tree);
140 static tree fold_not_const (const_tree, tree);
141 static tree fold_relational_const (enum tree_code, tree, tree, tree);
142 static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc != UNKNOWN_LOCATION ? tloc : loc;
}
/* Similar to protected_set_expr_location, but never modify x in place;
   if location can and needs to be set, unshare it.  */

static tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign bit.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
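/* Illustrative sketch (editorial addition, not part of GCC): the same
   sign-bit test applied to plain signed long operands.  The helper name
   signed_add_overflows_example is hypothetical.  Overflow is possible only
   when A and B have the same sign, and it happened exactly when the sign
   of SUM differs from theirs.  */

static int
signed_add_overflows_example (long a, long b)
{
  /* Compute the wrapped sum via unsigned arithmetic (well defined), then
     apply the same test as OVERFLOW_SUM_SIGN.  */
  long sum = (long) ((unsigned long) a + (unsigned long) b);
  return (~(a ^ b) & (a ^ sum)) < 0;
}

/* For example, with 64-bit long, signed_add_overflows_example (LONG_MAX, 1)
   yields 1, while signed_add_overflows_example (-1, 1) yields 0.  */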
183 /* If ARG2 divides ARG1 with zero remainder, carries out the division
184 of type CODE and returns the quotient.
185 Otherwise returns NULL_TREE. */
188 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
193 /* The sign of the division is according to operand two, that
194 does the correct thing for POINTER_PLUS_EXPR where we want
195 a signed division. */
196 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
197 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
198 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
201 quo = double_int_divmod (tree_to_double_int (arg1),
202 tree_to_double_int (arg2),
205 if (double_int_zero_p (rem))
206 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;
222 /* If a warning about undefined overflow is deferred, this is the
223 warning. Note that this may cause us to turn two warnings into
224 one, but that is fine since it is sufficient to only give one
225 warning per expression. */
227 static const char* fold_deferred_overflow_warning;
229 /* If a warning about undefined overflow is deferred, this is the
230 level at which the warning should be emitted. */
232 static enum warn_strict_overflow_code fold_deferred_overflow_code;
234 /* Start deferring overflow warnings. We could use a stack here to
235 permit nested calls, but at present it is not necessary. */
238 fold_defer_overflow_warnings (void)
240 ++fold_deferring_overflow_warnings;
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;
258 gcc_assert (fold_deferring_overflow_warnings > 0);
259 --fold_deferring_overflow_warnings;
260 if (fold_deferring_overflow_warnings > 0)
262 if (fold_deferred_overflow_warning != NULL
264 && code < (int) fold_deferred_overflow_code)
265 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
269 warnmsg = fold_deferred_overflow_warning;
270 fold_deferred_overflow_warning = NULL;
272 if (!issue || warnmsg == NULL)
275 if (gimple_no_warning_p (stmt))
/* Use the smallest code level when deciding to issue the
     warning.  */
280 if (code == 0 || code > (int) fold_deferred_overflow_code)
281 code = fold_deferred_overflow_code;
283 if (!issue_strict_overflow_warning (code))
287 locus = input_location;
289 locus = gimple_location (stmt);
290 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */
297 fold_undefer_and_ignore_overflow_warnings (void)
299 fold_undefer_overflow_warnings (false, NULL, 0);
302 /* Whether we are deferring overflow warnings. */
305 fold_deferring_overflow_warnings_p (void)
307 return fold_deferring_overflow_warnings > 0;
310 /* This is called when we fold something based on the fact that signed
311 overflow is undefined. */
314 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
316 if (fold_deferring_overflow_warnings > 0)
318 if (fold_deferred_overflow_warning == NULL
319 || wc < fold_deferred_overflow_code)
321 fold_deferred_overflow_warning = gmsgid;
322 fold_deferred_overflow_code = wc;
325 else if (issue_strict_overflow_warning (wc))
326 warning (OPT_Wstrict_overflow, gmsgid);
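/* Illustrative sketch (editorial addition, not part of GCC): a stripped-down
   version of the defer/undefer pattern implemented above, with hypothetical
   names and a plain string standing in for GCC's diagnostic machinery.
   While the depth counter is positive, messages are remembered instead of
   reported, and only the most conservative (lowest-level) one is kept.  */

static int example_defer_depth;
static const char *example_pending_msg;
static int example_pending_level;

static void
example_defer (void)
{
  ++example_defer_depth;
}

static void
example_warn (const char *msg, int level)
{
  if (example_defer_depth > 0)
    {
      /* Remember only the lowest-level pending warning.  */
      if (example_pending_msg == 0 || level < example_pending_level)
        {
          example_pending_msg = msg;
          example_pending_level = level;
        }
    }
  /* Otherwise the warning would be emitted immediately; omitted here.  */
}

/* Return the pending message to report, or null if there is none, if we
   are still deferring, or if ISSUE is false.  */
static const char *
example_undefer (int issue)
{
  const char *msg = 0;
  --example_defer_depth;
  if (example_defer_depth == 0)
    {
      msg = issue ? example_pending_msg : 0;
      example_pending_msg = 0;
    }
  return msg;
}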
329 /* Return true if the built-in mathematical function specified by CODE
330 is odd, i.e. -f(x) == f(-x). */
333 negate_mathfn_p (enum built_in_function code)
337 CASE_FLT_FN (BUILT_IN_ASIN):
338 CASE_FLT_FN (BUILT_IN_ASINH):
339 CASE_FLT_FN (BUILT_IN_ATAN):
340 CASE_FLT_FN (BUILT_IN_ATANH):
341 CASE_FLT_FN (BUILT_IN_CASIN):
342 CASE_FLT_FN (BUILT_IN_CASINH):
343 CASE_FLT_FN (BUILT_IN_CATAN):
344 CASE_FLT_FN (BUILT_IN_CATANH):
345 CASE_FLT_FN (BUILT_IN_CBRT):
346 CASE_FLT_FN (BUILT_IN_CPROJ):
347 CASE_FLT_FN (BUILT_IN_CSIN):
348 CASE_FLT_FN (BUILT_IN_CSINH):
349 CASE_FLT_FN (BUILT_IN_CTAN):
350 CASE_FLT_FN (BUILT_IN_CTANH):
351 CASE_FLT_FN (BUILT_IN_ERF):
352 CASE_FLT_FN (BUILT_IN_LLROUND):
353 CASE_FLT_FN (BUILT_IN_LROUND):
354 CASE_FLT_FN (BUILT_IN_ROUND):
355 CASE_FLT_FN (BUILT_IN_SIN):
356 CASE_FLT_FN (BUILT_IN_SINH):
357 CASE_FLT_FN (BUILT_IN_TAN):
358 CASE_FLT_FN (BUILT_IN_TANH):
359 CASE_FLT_FN (BUILT_IN_TRUNC):
362 CASE_FLT_FN (BUILT_IN_LLRINT):
363 CASE_FLT_FN (BUILT_IN_LRINT):
364 CASE_FLT_FN (BUILT_IN_NEARBYINT):
365 CASE_FLT_FN (BUILT_IN_RINT):
366 return !flag_rounding_math;
/* Check whether we may negate an integer constant T without causing
   overflow.  */
378 may_negate_without_overflow_p (const_tree t)
380 unsigned HOST_WIDE_INT val;
384 gcc_assert (TREE_CODE (t) == INTEGER_CST);
386 type = TREE_TYPE (t);
387 if (TYPE_UNSIGNED (type))
390 prec = TYPE_PRECISION (type);
391 if (prec > HOST_BITS_PER_WIDE_INT)
393 if (TREE_INT_CST_LOW (t) != 0)
395 prec -= HOST_BITS_PER_WIDE_INT;
396 val = TREE_INT_CST_HIGH (t);
399 val = TREE_INT_CST_LOW (t);
400 if (prec < HOST_BITS_PER_WIDE_INT)
401 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
402 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
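/* Illustrative sketch (editorial addition, not part of GCC): the same test
   for a value held in a plain unsigned long long with PREC significant bits
   (PREC <= 64).  Negating a two's complement value overflows exactly when
   the value is the most negative one, i.e. when its PREC-bit pattern is
   1000...0.  The helper name is hypothetical.  */

static int
may_negate_without_overflow_example (unsigned long long val, unsigned prec)
{
  unsigned long long min_pattern = 1ULL << (prec - 1);
  if (prec < 64)
    val &= (1ULL << prec) - 1;  /* Keep only the low PREC bits.  */
  return val != min_pattern;
}

/* For example, with prec == 8, the pattern 0x80 (-128) cannot be negated,
   while 0x7f (127) can.  */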
405 /* Determine whether an expression T can be cheaply negated using
406 the function negate_expr without introducing undefined overflow. */
409 negate_expr_p (tree t)
416 type = TREE_TYPE (t);
419 switch (TREE_CODE (t))
422 if (TYPE_OVERFLOW_WRAPS (type))
425 /* Check that -CST will not overflow type. */
426 return may_negate_without_overflow_p (t);
428 return (INTEGRAL_TYPE_P (type)
429 && TYPE_OVERFLOW_WRAPS (type));
436 /* We want to canonicalize to positive real constants. Pretend
437 that only negative ones can be easily negated. */
438 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
441 return negate_expr_p (TREE_REALPART (t))
442 && negate_expr_p (TREE_IMAGPART (t));
445 return negate_expr_p (TREE_OPERAND (t, 0))
446 && negate_expr_p (TREE_OPERAND (t, 1));
449 return negate_expr_p (TREE_OPERAND (t, 0));
452 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
453 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
455 /* -(A + B) -> (-B) - A. */
456 if (negate_expr_p (TREE_OPERAND (t, 1))
457 && reorder_operands_p (TREE_OPERAND (t, 0),
458 TREE_OPERAND (t, 1)))
460 /* -(A + B) -> (-A) - B. */
461 return negate_expr_p (TREE_OPERAND (t, 0));
464 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
465 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
466 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
467 && reorder_operands_p (TREE_OPERAND (t, 0),
468 TREE_OPERAND (t, 1));
471 if (TYPE_UNSIGNED (TREE_TYPE (t)))
477 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
478 return negate_expr_p (TREE_OPERAND (t, 1))
479 || negate_expr_p (TREE_OPERAND (t, 0));
/* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
492 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
493 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
495 return negate_expr_p (TREE_OPERAND (t, 1))
496 || negate_expr_p (TREE_OPERAND (t, 0));
499 /* Negate -((double)float) as (double)(-float). */
500 if (TREE_CODE (type) == REAL_TYPE)
502 tree tem = strip_float_extensions (t);
504 return negate_expr_p (tem);
509 /* Negate -f(x) as f(-x). */
510 if (negate_mathfn_p (builtin_mathfn_code (t)))
511 return negate_expr_p (CALL_EXPR_ARG (t, 0));
515 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
516 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
518 tree op1 = TREE_OPERAND (t, 1);
519 if (TREE_INT_CST_HIGH (op1) == 0
520 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
521 == TREE_INT_CST_LOW (op1))
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */
538 fold_negate_expr (location_t loc, tree t)
540 tree type = TREE_TYPE (t);
543 switch (TREE_CODE (t))
545 /* Convert - (~A) to A + 1. */
547 if (INTEGRAL_TYPE_P (type))
548 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
549 build_int_cst (type, 1));
553 tem = fold_negate_const (t, type);
554 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
555 || !TYPE_OVERFLOW_TRAPS (type))
560 tem = fold_negate_const (t, type);
561 /* Two's complement FP formats, such as c4x, may overflow. */
562 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
567 tem = fold_negate_const (t, type);
572 tree rpart = negate_expr (TREE_REALPART (t));
573 tree ipart = negate_expr (TREE_IMAGPART (t));
575 if ((TREE_CODE (rpart) == REAL_CST
576 && TREE_CODE (ipart) == REAL_CST)
577 || (TREE_CODE (rpart) == INTEGER_CST
578 && TREE_CODE (ipart) == INTEGER_CST))
579 return build_complex (type, rpart, ipart);
584 if (negate_expr_p (t))
585 return fold_build2_loc (loc, COMPLEX_EXPR, type,
586 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
587 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
591 if (negate_expr_p (t))
592 return fold_build1_loc (loc, CONJ_EXPR, type,
593 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
597 return TREE_OPERAND (t, 0);
600 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
601 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
603 /* -(A + B) -> (-B) - A. */
604 if (negate_expr_p (TREE_OPERAND (t, 1))
605 && reorder_operands_p (TREE_OPERAND (t, 0),
606 TREE_OPERAND (t, 1)))
608 tem = negate_expr (TREE_OPERAND (t, 1));
609 return fold_build2_loc (loc, MINUS_EXPR, type,
610 tem, TREE_OPERAND (t, 0));
613 /* -(A + B) -> (-A) - B. */
614 if (negate_expr_p (TREE_OPERAND (t, 0)))
616 tem = negate_expr (TREE_OPERAND (t, 0));
617 return fold_build2_loc (loc, MINUS_EXPR, type,
618 tem, TREE_OPERAND (t, 1));
624 /* - (A - B) -> B - A */
625 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
626 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
627 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
628 return fold_build2_loc (loc, MINUS_EXPR, type,
629 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
633 if (TYPE_UNSIGNED (type))
639 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
641 tem = TREE_OPERAND (t, 1);
642 if (negate_expr_p (tem))
643 return fold_build2_loc (loc, TREE_CODE (t), type,
644 TREE_OPERAND (t, 0), negate_expr (tem));
645 tem = TREE_OPERAND (t, 0);
646 if (negate_expr_p (tem))
647 return fold_build2_loc (loc, TREE_CODE (t), type,
648 negate_expr (tem), TREE_OPERAND (t, 1));
/* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
662 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
664 const char * const warnmsg = G_("assuming signed overflow does not "
665 "occur when negating a division");
666 tem = TREE_OPERAND (t, 1);
667 if (negate_expr_p (tem))
669 if (INTEGRAL_TYPE_P (type)
670 && (TREE_CODE (tem) != INTEGER_CST
671 || integer_onep (tem)))
672 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
673 return fold_build2_loc (loc, TREE_CODE (t), type,
674 TREE_OPERAND (t, 0), negate_expr (tem));
676 tem = TREE_OPERAND (t, 0);
677 if (negate_expr_p (tem))
679 if (INTEGRAL_TYPE_P (type)
680 && (TREE_CODE (tem) != INTEGER_CST
681 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
682 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
683 return fold_build2_loc (loc, TREE_CODE (t), type,
684 negate_expr (tem), TREE_OPERAND (t, 1));
690 /* Convert -((double)float) into (double)(-float). */
691 if (TREE_CODE (type) == REAL_TYPE)
693 tem = strip_float_extensions (t);
694 if (tem != t && negate_expr_p (tem))
695 return fold_convert_loc (loc, type, negate_expr (tem));
700 /* Negate -f(x) as f(-x). */
701 if (negate_mathfn_p (builtin_mathfn_code (t))
702 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
706 fndecl = get_callee_fndecl (t);
707 arg = negate_expr (CALL_EXPR_ARG (t, 0));
708 return build_call_expr_loc (loc, fndecl, 1, arg);
713 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
714 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
716 tree op1 = TREE_OPERAND (t, 1);
717 if (TREE_INT_CST_HIGH (op1) == 0
718 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
719 == TREE_INT_CST_LOW (op1))
721 tree ntype = TYPE_UNSIGNED (type)
722 ? signed_type_for (type)
723 : unsigned_type_for (type);
724 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
725 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
726 return fold_convert_loc (loc, type, temp);
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */
751 loc = EXPR_LOCATION (t);
752 type = TREE_TYPE (t);
755 tem = fold_negate_expr (loc, t);
757 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
758 return fold_convert_loc (loc, type, tem);
761 /* Split a tree IN into a constant, literal and variable parts that could be
762 combined with CODE to make IN. "constant" means an expression with
763 TREE_CONSTANT but that isn't an actual constant. CODE must be a
764 commutative arithmetic operation. Store the constant part into *CONP,
765 the literal in *LITP and return the variable part. If a part isn't
766 present, set it to null. If the tree does not decompose in this way,
767 return the entire tree as the variable part and the other parts as null.
769 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
770 case, we negate an operand that was subtracted. Except if it is a
771 literal for which we use *MINUS_LITP instead.
773 If NEGATE_P is true, we are negating all of IN, again except a literal
774 for which we use *MINUS_LITP instead.
776 If IN is itself a literal or constant, return it as appropriate.
778 Note that we do not guarantee that any of the three values will be the
779 same type as IN, but they will have the same signedness and mode. */
782 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
783 tree *minus_litp, int negate_p)
791 /* Strip any conversions that don't change the machine mode or signedness. */
792 STRIP_SIGN_NOPS (in);
794 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
795 || TREE_CODE (in) == FIXED_CST)
797 else if (TREE_CODE (in) == code
798 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
799 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
800 /* We can associate addition and subtraction together (even
801 though the C standard doesn't say so) for integers because
802 the value is not affected. For reals, the value might be
803 affected, so we can't. */
804 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
805 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
807 tree op0 = TREE_OPERAND (in, 0);
808 tree op1 = TREE_OPERAND (in, 1);
809 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
810 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
812 /* First see if either of the operands is a literal, then a constant. */
813 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
814 || TREE_CODE (op0) == FIXED_CST)
815 *litp = op0, op0 = 0;
816 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
817 || TREE_CODE (op1) == FIXED_CST)
818 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
820 if (op0 != 0 && TREE_CONSTANT (op0))
821 *conp = op0, op0 = 0;
822 else if (op1 != 0 && TREE_CONSTANT (op1))
823 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
825 /* If we haven't dealt with either operand, this is not a case we can
826 decompose. Otherwise, VAR is either of the ones remaining, if any. */
827 if (op0 != 0 && op1 != 0)
832 var = op1, neg_var_p = neg1_p;
834 /* Now do any needed negations. */
836 *minus_litp = *litp, *litp = 0;
838 *conp = negate_expr (*conp);
840 var = negate_expr (var);
842 else if (TREE_CONSTANT (in))
850 *minus_litp = *litp, *litp = 0;
851 else if (*minus_litp)
852 *litp = *minus_litp, *minus_litp = 0;
853 *conp = negate_expr (*conp);
854 var = negate_expr (var);
860 /* Re-associate trees split by the above function. T1 and T2 are
861 either expressions to associate or null. Return the new
862 expression, if any. LOC is the location of the new expression. If
863 we build an operation, do it in TYPE and with CODE. */
866 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
873 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
874 try to fold this since we will have infinite recursion. But do
875 deal with any NEGATE_EXPRs. */
876 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
877 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
879 if (code == PLUS_EXPR)
881 if (TREE_CODE (t1) == NEGATE_EXPR)
882 return build2_loc (loc, MINUS_EXPR, type,
883 fold_convert_loc (loc, type, t2),
884 fold_convert_loc (loc, type,
885 TREE_OPERAND (t1, 0)));
886 else if (TREE_CODE (t2) == NEGATE_EXPR)
887 return build2_loc (loc, MINUS_EXPR, type,
888 fold_convert_loc (loc, type, t1),
889 fold_convert_loc (loc, type,
890 TREE_OPERAND (t2, 0)));
891 else if (integer_zerop (t2))
892 return fold_convert_loc (loc, type, t1);
894 else if (code == MINUS_EXPR)
896 if (integer_zerop (t2))
897 return fold_convert_loc (loc, type, t1);
900 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
901 fold_convert_loc (loc, type, t2));
904 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
905 fold_convert_loc (loc, type, t2));
908 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
909 for use in int_const_binop, size_binop and size_diffop. */
912 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
914 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
916 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
931 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
932 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
933 && TYPE_MODE (type1) == TYPE_MODE (type2);
937 /* Combine two integer constants ARG1 and ARG2 under operation CODE
938 to produce a new constant. Return NULL_TREE if we don't know how
939 to evaluate CODE at compile-time. */
942 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
944 double_int op1, op2, res, tmp;
946 tree type = TREE_TYPE (arg1);
947 bool uns = TYPE_UNSIGNED (type);
949 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
950 bool overflow = false;
952 op1 = tree_to_double_int (arg1);
953 op2 = tree_to_double_int (arg2);
958 res = double_int_ior (op1, op2);
962 res = double_int_xor (op1, op2);
966 res = double_int_and (op1, op2);
970 res = double_int_rshift (op1, double_int_to_shwi (op2),
971 TYPE_PRECISION (type), !uns);
975 /* It's unclear from the C standard whether shifts can overflow.
976 The following code ignores overflow; perhaps a C standard
977 interpretation ruling is needed. */
978 res = double_int_lshift (op1, double_int_to_shwi (op2),
979 TYPE_PRECISION (type), !uns);
983 res = double_int_rrotate (op1, double_int_to_shwi (op2),
984 TYPE_PRECISION (type));
988 res = double_int_lrotate (op1, double_int_to_shwi (op2),
989 TYPE_PRECISION (type));
993 overflow = add_double (op1.low, op1.high, op2.low, op2.high,
994 &res.low, &res.high);
998 neg_double (op2.low, op2.high, &res.low, &res.high);
999 add_double (op1.low, op1.high, res.low, res.high,
1000 &res.low, &res.high);
1001 overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
1005 overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
1006 &res.low, &res.high);
1009 case TRUNC_DIV_EXPR:
1010 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1011 case EXACT_DIV_EXPR:
1012 /* This is a shortcut for a common special case. */
1013 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1014 && !TREE_OVERFLOW (arg1)
1015 && !TREE_OVERFLOW (arg2)
1016 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1018 if (code == CEIL_DIV_EXPR)
1019 op1.low += op2.low - 1;
1021 res.low = op1.low / op2.low, res.high = 0;
1025 /* ... fall through ... */
1027 case ROUND_DIV_EXPR:
1028 if (double_int_zero_p (op2))
1030 if (double_int_one_p (op2))
1035 if (double_int_equal_p (op1, op2)
1036 && ! double_int_zero_p (op1))
1038 res = double_int_one;
1041 overflow = div_and_round_double (code, uns,
1042 op1.low, op1.high, op2.low, op2.high,
1043 &res.low, &res.high,
1044 &tmp.low, &tmp.high);
1047 case TRUNC_MOD_EXPR:
1048 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1049 /* This is a shortcut for a common special case. */
1050 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1051 && !TREE_OVERFLOW (arg1)
1052 && !TREE_OVERFLOW (arg2)
1053 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1055 if (code == CEIL_MOD_EXPR)
1056 op1.low += op2.low - 1;
1057 res.low = op1.low % op2.low, res.high = 0;
1061 /* ... fall through ... */
1063 case ROUND_MOD_EXPR:
1064 if (double_int_zero_p (op2))
1066 overflow = div_and_round_double (code, uns,
1067 op1.low, op1.high, op2.low, op2.high,
1068 &tmp.low, &tmp.high,
1069 &res.low, &res.high);
1073 res = double_int_min (op1, op2, uns);
1077 res = double_int_max (op1, op2, uns);
1084 t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
1085 ((!uns || is_sizetype) && overflow)
1086 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
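/* Illustrative sketch (editorial addition, not part of GCC): the rounding
   conventions of the division codes handled above (TRUNC, FLOOR, CEIL,
   ROUND), written for plain long long operands, assuming b != 0 and no
   overflow.  C's `/' already truncates toward zero; the other modes adjust
   the truncated quotient by one when the remainder falls on the "wrong"
   side.  Function names are hypothetical, and ROUND is assumed here to
   break ties away from zero.  */

static long long
div_floor_example (long long a, long long b)
{
  long long q = a / b, r = a % b;
  return (r != 0 && (r < 0) != (b < 0)) ? q - 1 : q;
}

static long long
div_ceil_example (long long a, long long b)
{
  long long q = a / b, r = a % b;
  return (r != 0 && (r < 0) == (b < 0)) ? q + 1 : q;
}

static long long
div_round_example (long long a, long long b)
{
  long long q = a / b, r = a % b;
  long long abs_r = r < 0 ? -r : r;
  long long abs_b = b < 0 ? -b : b;
  if (r != 0 && 2 * abs_r >= abs_b)
    q += ((r < 0) == (b < 0)) ? 1 : -1;
  return q;
}

/* For example, 7/2 yields 3 (TRUNC), 3 (FLOOR), 4 (CEIL) and 4 (ROUND),
   while -7/2 yields -3, -4, -3 and -4 respectively.  */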
1091 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1092 constant. We assume ARG1 and ARG2 have the same data type, or at least
1093 are the same kind of constant and the same machine mode. Return zero if
1094 combining the constants is not allowed in the current operating mode. */
1097 const_binop (enum tree_code code, tree arg1, tree arg2)
1099 /* Sanity check for the recursive cases. */
1106 if (TREE_CODE (arg1) == INTEGER_CST)
1107 return int_const_binop (code, arg1, arg2);
1109 if (TREE_CODE (arg1) == REAL_CST)
1111 enum machine_mode mode;
1114 REAL_VALUE_TYPE value;
1115 REAL_VALUE_TYPE result;
1119 /* The following codes are handled by real_arithmetic. */
1134 d1 = TREE_REAL_CST (arg1);
1135 d2 = TREE_REAL_CST (arg2);
1137 type = TREE_TYPE (arg1);
1138 mode = TYPE_MODE (type);
1140 /* Don't perform operation if we honor signaling NaNs and
1141 either operand is a NaN. */
1142 if (HONOR_SNANS (mode)
1143 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1146 /* Don't perform operation if it would raise a division
1147 by zero exception. */
1148 if (code == RDIV_EXPR
1149 && REAL_VALUES_EQUAL (d2, dconst0)
1150 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1153 /* If either operand is a NaN, just return it. Otherwise, set up
1154 for floating-point trap; we return an overflow. */
1155 if (REAL_VALUE_ISNAN (d1))
1157 else if (REAL_VALUE_ISNAN (d2))
1160 inexact = real_arithmetic (&value, code, &d1, &d2);
1161 real_convert (&result, mode, &value);
1163 /* Don't constant fold this floating point operation if
1164 the result has overflowed and flag_trapping_math. */
1165 if (flag_trapping_math
1166 && MODE_HAS_INFINITIES (mode)
1167 && REAL_VALUE_ISINF (result)
1168 && !REAL_VALUE_ISINF (d1)
1169 && !REAL_VALUE_ISINF (d2))
1172 /* Don't constant fold this floating point operation if the
1173 result may dependent upon the run-time rounding mode and
1174 flag_rounding_math is set, or if GCC's software emulation
1175 is unable to accurately represent the result. */
1176 if ((flag_rounding_math
1177 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1178 && (inexact || !real_identical (&result, &value)))
1181 t = build_real (type, result);
1183 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1187 if (TREE_CODE (arg1) == FIXED_CST)
1189 FIXED_VALUE_TYPE f1;
1190 FIXED_VALUE_TYPE f2;
1191 FIXED_VALUE_TYPE result;
1196 /* The following codes are handled by fixed_arithmetic. */
1202 case TRUNC_DIV_EXPR:
1203 f2 = TREE_FIXED_CST (arg2);
1208 f2.data.high = TREE_INT_CST_HIGH (arg2);
1209 f2.data.low = TREE_INT_CST_LOW (arg2);
1217 f1 = TREE_FIXED_CST (arg1);
1218 type = TREE_TYPE (arg1);
1219 sat_p = TYPE_SATURATING (type);
1220 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1221 t = build_fixed (type, result);
1222 /* Propagate overflow flags. */
1223 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1224 TREE_OVERFLOW (t) = 1;
1228 if (TREE_CODE (arg1) == COMPLEX_CST)
1230 tree type = TREE_TYPE (arg1);
1231 tree r1 = TREE_REALPART (arg1);
1232 tree i1 = TREE_IMAGPART (arg1);
1233 tree r2 = TREE_REALPART (arg2);
1234 tree i2 = TREE_IMAGPART (arg2);
1241 real = const_binop (code, r1, r2);
1242 imag = const_binop (code, i1, i2);
1246 if (COMPLEX_FLOAT_TYPE_P (type))
1247 return do_mpc_arg2 (arg1, arg2, type,
1248 /* do_nonfinite= */ folding_initializer,
1251 real = const_binop (MINUS_EXPR,
1252 const_binop (MULT_EXPR, r1, r2),
1253 const_binop (MULT_EXPR, i1, i2));
1254 imag = const_binop (PLUS_EXPR,
1255 const_binop (MULT_EXPR, r1, i2),
1256 const_binop (MULT_EXPR, i1, r2));
1260 if (COMPLEX_FLOAT_TYPE_P (type))
1261 return do_mpc_arg2 (arg1, arg2, type,
1262 /* do_nonfinite= */ folding_initializer,
1265 case TRUNC_DIV_EXPR:
1267 case FLOOR_DIV_EXPR:
1268 case ROUND_DIV_EXPR:
1269 if (flag_complex_method == 0)
1271 /* Keep this algorithm in sync with
1272 tree-complex.c:expand_complex_div_straight().
1274 Expand complex division to scalars, straightforward algorithm.
1275 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1279 = const_binop (PLUS_EXPR,
1280 const_binop (MULT_EXPR, r2, r2),
1281 const_binop (MULT_EXPR, i2, i2));
1283 = const_binop (PLUS_EXPR,
1284 const_binop (MULT_EXPR, r1, r2),
1285 const_binop (MULT_EXPR, i1, i2));
1287 = const_binop (MINUS_EXPR,
1288 const_binop (MULT_EXPR, i1, r2),
1289 const_binop (MULT_EXPR, r1, i2));
1291 real = const_binop (code, t1, magsquared);
1292 imag = const_binop (code, t2, magsquared);
1296 /* Keep this algorithm in sync with
1297 tree-complex.c:expand_complex_div_wide().
1299 Expand complex division to scalars, modified algorithm to minimize
1300 overflow with wide input ranges. */
1301 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1302 fold_abs_const (r2, TREE_TYPE (type)),
1303 fold_abs_const (i2, TREE_TYPE (type)));
1305 if (integer_nonzerop (compare))
1307 /* In the TRUE branch, we compute
1309 div = (br * ratio) + bi;
1310 tr = (ar * ratio) + ai;
1311 ti = (ai * ratio) - ar;
1314 tree ratio = const_binop (code, r2, i2);
1315 tree div = const_binop (PLUS_EXPR, i2,
1316 const_binop (MULT_EXPR, r2, ratio));
1317 real = const_binop (MULT_EXPR, r1, ratio);
1318 real = const_binop (PLUS_EXPR, real, i1);
1319 real = const_binop (code, real, div);
1321 imag = const_binop (MULT_EXPR, i1, ratio);
1322 imag = const_binop (MINUS_EXPR, imag, r1);
1323 imag = const_binop (code, imag, div);
1327 /* In the FALSE branch, we compute
1329 divisor = (d * ratio) + c;
1330 tr = (b * ratio) + a;
1331 ti = b - (a * ratio);
1334 tree ratio = const_binop (code, i2, r2);
1335 tree div = const_binop (PLUS_EXPR, r2,
1336 const_binop (MULT_EXPR, i2, ratio));
1338 real = const_binop (MULT_EXPR, i1, ratio);
1339 real = const_binop (PLUS_EXPR, real, r1);
1340 real = const_binop (code, real, div);
1342 imag = const_binop (MULT_EXPR, r1, ratio);
1343 imag = const_binop (MINUS_EXPR, i1, imag);
1344 imag = const_binop (code, imag, div);
1354 return build_complex (type, real, imag);
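/* Illustrative sketch (editorial addition, not part of GCC): the widening
   (Smith's) algorithm used in the "wide input range" branch above, written
   for plain doubles.  Dividing by the larger-magnitude part of the divisor
   first keeps the intermediate products from overflowing.  The struct and
   function names are hypothetical.  */

struct complex_example { double re, im; };

static struct complex_example
complex_div_wide_example (struct complex_example a, struct complex_example b)
{
  struct complex_example q;
  double abs_br = b.re < 0 ? -b.re : b.re;
  double abs_bi = b.im < 0 ? -b.im : b.im;

  if (abs_br < abs_bi)
    {
      /* |b.re| < |b.im|: scale by ratio = b.re / b.im.  */
      double ratio = b.re / b.im;
      double den = b.im + b.re * ratio;
      q.re = (a.re * ratio + a.im) / den;
      q.im = (a.im * ratio - a.re) / den;
    }
  else
    {
      /* |b.re| >= |b.im|: scale by ratio = b.im / b.re.  */
      double ratio = b.im / b.re;
      double den = b.re + b.im * ratio;
      q.re = (a.re + a.im * ratio) / den;
      q.im = (a.im - a.re * ratio) / den;
    }
  return q;
}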
  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0.  */
          if (elements1 == NULL_TREE)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }
          if (elements2 == NULL_TREE)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2);
          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }
      return build_vector (type, nreverse (list));
    }
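/* Illustrative sketch (editorial addition, not part of GCC): elementwise
   folding of two constant integer "vectors" of a fixed element count, with
   missing trailing elements treated as zero, mirroring the VECTOR_CST case
   above.  Names are hypothetical; the callback stands in for const_binop.  */

static void
fold_int_vectors_example (long out[], int count,
                          const long *a, int a_len,
                          const long *b, int b_len,
                          long (*binop) (long, long))
{
  int i;
  for (i = 0; i < count; i++)
    {
      long x = i < a_len ? a[i] : 0;  /* Trailing elements default to 0.  */
      long y = i < b_len ? b[i] : 0;
      out[i] = binop (x, y);
    }
}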
1404 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1405 indicates which particular sizetype to create. */
1408 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1410 return build_int_cst (sizetype_tab[(int) kind], number);
1413 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1414 is a tree code. The type of the result is taken from the operands.
1415 Both must be equivalent integer types, ala int_binop_types_match_p.
1416 If the operands are constant, so is the result. */
1419 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1421 tree type = TREE_TYPE (arg0);
1423 if (arg0 == error_mark_node || arg1 == error_mark_node)
1424 return error_mark_node;
1426 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1429 /* Handle the special case of two integer constants faster. */
1430 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1432 /* And some specific cases even faster than that. */
1433 if (code == PLUS_EXPR)
1435 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1437 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1440 else if (code == MINUS_EXPR)
1442 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1445 else if (code == MULT_EXPR)
1447 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1451 /* Handle general case of two integer constants. */
1452 return int_const_binop (code, arg0, arg1);
1455 return fold_build2_loc (loc, code, type, arg0, arg1);
1458 /* Given two values, either both of sizetype or both of bitsizetype,
1459 compute the difference between the two values. Return the value
1460 in signed type corresponding to the type of the operands. */
1463 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1465 tree type = TREE_TYPE (arg0);
1468 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1471 /* If the type is already signed, just do the simple thing. */
1472 if (!TYPE_UNSIGNED (type))
1473 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1475 if (type == sizetype)
1477 else if (type == bitsizetype)
1478 ctype = sbitsizetype;
1480 ctype = signed_type_for (type);
1482 /* If either operand is not a constant, do the conversions to the signed
1483 type and subtract. The hardware will do the right thing with any
1484 overflow in the subtraction. */
1485 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1486 return size_binop_loc (loc, MINUS_EXPR,
1487 fold_convert_loc (loc, ctype, arg0),
1488 fold_convert_loc (loc, ctype, arg1));
1490 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1491 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1492 overflow) and negate (which can't either). Special-case a result
1493 of zero while we're here. */
1494 if (tree_int_cst_equal (arg0, arg1))
1495 return build_int_cst (ctype, 0);
1496 else if (tree_int_cst_lt (arg1, arg0))
1497 return fold_convert_loc (loc, ctype,
1498 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1500 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1501 fold_convert_loc (loc, ctype,
1502 size_binop_loc (loc,
1507 /* A subroutine of fold_convert_const handling conversions of an
1508 INTEGER_CST to another integer type. */
1511 fold_convert_const_int_from_int (tree type, const_tree arg1)
1515 /* Given an integer constant, make new constant with new type,
1516 appropriately sign-extended or truncated. */
1517 t = force_fit_type_double (type, tree_to_double_int (arg1),
1518 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1519 (TREE_INT_CST_HIGH (arg1) < 0
1520 && (TYPE_UNSIGNED (type)
1521 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1522 | TREE_OVERFLOW (arg1));
1527 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1528 to an integer type. */
1531 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1536 /* The following code implements the floating point to integer
1537 conversion rules required by the Java Language Specification,
1538 that IEEE NaNs are mapped to zero and values that overflow
1539 the target precision saturate, i.e. values greater than
1540 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1541 are mapped to INT_MIN. These semantics are allowed by the
1542 C and C++ standards that simply state that the behavior of
1543 FP-to-integer conversion is unspecified upon overflow. */
1547 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1551 case FIX_TRUNC_EXPR:
1552 real_trunc (&r, VOIDmode, &x);
1559 /* If R is NaN, return zero and show we have an overflow. */
1560 if (REAL_VALUE_ISNAN (r))
1563 val = double_int_zero;
1566 /* See if R is less than the lower bound or greater than the
1571 tree lt = TYPE_MIN_VALUE (type);
1572 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1573 if (REAL_VALUES_LESS (r, l))
1576 val = tree_to_double_int (lt);
1582 tree ut = TYPE_MAX_VALUE (type);
1585 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1586 if (REAL_VALUES_LESS (u, r))
1589 val = tree_to_double_int (ut);
1595 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1597 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
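/* Illustrative sketch (editorial addition, not part of GCC): the saturating
   conversion described above, for a plain double and a 32-bit int.  NaN is
   mapped to zero, and out-of-range values are clamped to INT_MIN/INT_MAX;
   the flag mirrors the overflow bit set on the folded constant.  The helper
   name is hypothetical.  */

static int
saturating_double_to_int_example (double r, int *overflowed)
{
  *overflowed = 0;
  if (r != r)                   /* NaN: map to zero and note overflow.  */
    {
      *overflowed = 1;
      return 0;
    }
  if (r <= -2147483648.0)       /* At or below INT_MIN: clamp.  */
    {
      *overflowed = r < -2147483648.0;
      return -2147483647 - 1;
    }
  if (r >= 2147483648.0)        /* At or above INT_MAX + 1: clamp.  */
    {
      *overflowed = 1;
      return 2147483647;
    }
  return (int) r;               /* In range: truncate toward zero.  */
}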
1601 /* A subroutine of fold_convert_const handling conversions of a
1602 FIXED_CST to an integer type. */
1605 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1608 double_int temp, temp_trunc;
1611 /* Right shift FIXED_CST to temp by fbit. */
1612 temp = TREE_FIXED_CST (arg1).data;
1613 mode = TREE_FIXED_CST (arg1).mode;
1614 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
1616 temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
1617 HOST_BITS_PER_DOUBLE_INT,
1618 SIGNED_FIXED_POINT_MODE_P (mode));
1620 /* Left shift temp to temp_trunc by fbit. */
1621 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
1622 HOST_BITS_PER_DOUBLE_INT,
1623 SIGNED_FIXED_POINT_MODE_P (mode));
1627 temp = double_int_zero;
1628 temp_trunc = double_int_zero;
/* If FIXED_CST is negative, we need to round the value toward 0:
     if the fractional bits are not all zero, add 1 to temp.  */
1633 if (SIGNED_FIXED_POINT_MODE_P (mode)
1634 && double_int_negative_p (temp_trunc)
1635 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
1636 temp = double_int_add (temp, double_int_one);
1638 /* Given a fixed-point constant, make new constant with new type,
1639 appropriately sign-extended or truncated. */
1640 t = force_fit_type_double (type, temp, -1,
1641 (double_int_negative_p (temp)
1642 && (TYPE_UNSIGNED (type)
1643 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1644 | TREE_OVERFLOW (arg1));
1649 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1650 to another floating point type. */
1653 fold_convert_const_real_from_real (tree type, const_tree arg1)
1655 REAL_VALUE_TYPE value;
1658 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1659 t = build_real (type, value);
1661 /* If converting an infinity or NAN to a representation that doesn't
1662 have one, set the overflow bit so that we can produce some kind of
1663 error message at the appropriate point if necessary. It's not the
1664 most user-friendly message, but it's better than nothing. */
1665 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1666 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1667 TREE_OVERFLOW (t) = 1;
1668 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1669 && !MODE_HAS_NANS (TYPE_MODE (type)))
1670 TREE_OVERFLOW (t) = 1;
1671 /* Regular overflow, conversion produced an infinity in a mode that
1672 can't represent them. */
1673 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1674 && REAL_VALUE_ISINF (value)
1675 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1676 TREE_OVERFLOW (t) = 1;
1678 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1682 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1683 to a floating point type. */
1686 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1688 REAL_VALUE_TYPE value;
1691 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1692 t = build_real (type, value);
1694 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1698 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1699 to another fixed-point type. */
1702 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1704 FIXED_VALUE_TYPE value;
1708 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1709 TYPE_SATURATING (type));
1710 t = build_fixed (type, value);
1712 /* Propagate overflow flags. */
1713 if (overflow_p | TREE_OVERFLOW (arg1))
1714 TREE_OVERFLOW (t) = 1;
1718 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1719 to a fixed-point type. */
1722 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1724 FIXED_VALUE_TYPE value;
1728 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1729 TREE_INT_CST (arg1),
1730 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1731 TYPE_SATURATING (type));
1732 t = build_fixed (type, value);
1734 /* Propagate overflow flags. */
1735 if (overflow_p | TREE_OVERFLOW (arg1))
1736 TREE_OVERFLOW (t) = 1;
1740 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1741 to a fixed-point type. */
1744 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1746 FIXED_VALUE_TYPE value;
1750 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1751 &TREE_REAL_CST (arg1),
1752 TYPE_SATURATING (type));
1753 t = build_fixed (type, value);
1755 /* Propagate overflow flags. */
1756 if (overflow_p | TREE_OVERFLOW (arg1))
1757 TREE_OVERFLOW (t) = 1;
1761 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1762 type TYPE. If no simplification can be done return NULL_TREE. */
1765 fold_convert_const (enum tree_code code, tree type, tree arg1)
1767 if (TREE_TYPE (arg1) == type)
1770 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1771 || TREE_CODE (type) == OFFSET_TYPE)
1773 if (TREE_CODE (arg1) == INTEGER_CST)
1774 return fold_convert_const_int_from_int (type, arg1);
1775 else if (TREE_CODE (arg1) == REAL_CST)
1776 return fold_convert_const_int_from_real (code, type, arg1);
1777 else if (TREE_CODE (arg1) == FIXED_CST)
1778 return fold_convert_const_int_from_fixed (type, arg1);
1780 else if (TREE_CODE (type) == REAL_TYPE)
1782 if (TREE_CODE (arg1) == INTEGER_CST)
1783 return build_real_from_int_cst (type, arg1);
1784 else if (TREE_CODE (arg1) == REAL_CST)
1785 return fold_convert_const_real_from_real (type, arg1);
1786 else if (TREE_CODE (arg1) == FIXED_CST)
1787 return fold_convert_const_real_from_fixed (type, arg1);
1789 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1791 if (TREE_CODE (arg1) == FIXED_CST)
1792 return fold_convert_const_fixed_from_fixed (type, arg1);
1793 else if (TREE_CODE (arg1) == INTEGER_CST)
1794 return fold_convert_const_fixed_from_int (type, arg1);
1795 else if (TREE_CODE (arg1) == REAL_CST)
1796 return fold_convert_const_fixed_from_real (type, arg1);
1801 /* Construct a vector of zero elements of vector type TYPE. */
1804 build_zero_vector (tree type)
1808 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1809 return build_vector_from_val (type, t);
1812 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1815 fold_convertible_p (const_tree type, const_tree arg)
1817 tree orig = TREE_TYPE (arg);
1822 if (TREE_CODE (arg) == ERROR_MARK
1823 || TREE_CODE (type) == ERROR_MARK
1824 || TREE_CODE (orig) == ERROR_MARK)
1827 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1830 switch (TREE_CODE (type))
1832 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1833 case POINTER_TYPE: case REFERENCE_TYPE:
1835 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1836 || TREE_CODE (orig) == OFFSET_TYPE)
1838 return (TREE_CODE (orig) == VECTOR_TYPE
1839 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1842 case FIXED_POINT_TYPE:
1846 return TREE_CODE (type) == TREE_CODE (orig);
1853 /* Convert expression ARG to type TYPE. Used by the middle-end for
1854 simple conversions in preference to calling the front-end's convert. */
1857 fold_convert_loc (location_t loc, tree type, tree arg)
1859 tree orig = TREE_TYPE (arg);
1865 if (TREE_CODE (arg) == ERROR_MARK
1866 || TREE_CODE (type) == ERROR_MARK
1867 || TREE_CODE (orig) == ERROR_MARK)
1868 return error_mark_node;
1870 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1871 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1873 switch (TREE_CODE (type))
1876 case REFERENCE_TYPE:
1877 /* Handle conversions between pointers to different address spaces. */
1878 if (POINTER_TYPE_P (orig)
1879 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1880 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1881 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1884 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1886 if (TREE_CODE (arg) == INTEGER_CST)
1888 tem = fold_convert_const (NOP_EXPR, type, arg);
1889 if (tem != NULL_TREE)
1892 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1893 || TREE_CODE (orig) == OFFSET_TYPE)
1894 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1895 if (TREE_CODE (orig) == COMPLEX_TYPE)
1896 return fold_convert_loc (loc, type,
1897 fold_build1_loc (loc, REALPART_EXPR,
1898 TREE_TYPE (orig), arg));
1899 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1900 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1901 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1904 if (TREE_CODE (arg) == INTEGER_CST)
1906 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1907 if (tem != NULL_TREE)
1910 else if (TREE_CODE (arg) == REAL_CST)
1912 tem = fold_convert_const (NOP_EXPR, type, arg);
1913 if (tem != NULL_TREE)
1916 else if (TREE_CODE (arg) == FIXED_CST)
1918 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1919 if (tem != NULL_TREE)
1923 switch (TREE_CODE (orig))
1926 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1927 case POINTER_TYPE: case REFERENCE_TYPE:
1928 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1931 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1933 case FIXED_POINT_TYPE:
1934 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1937 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1938 return fold_convert_loc (loc, type, tem);
1944 case FIXED_POINT_TYPE:
1945 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1946 || TREE_CODE (arg) == REAL_CST)
1948 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1949 if (tem != NULL_TREE)
1950 goto fold_convert_exit;
1953 switch (TREE_CODE (orig))
1955 case FIXED_POINT_TYPE:
1960 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1963 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1964 return fold_convert_loc (loc, type, tem);
1971 switch (TREE_CODE (orig))
1974 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1975 case POINTER_TYPE: case REFERENCE_TYPE:
1977 case FIXED_POINT_TYPE:
1978 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1979 fold_convert_loc (loc, TREE_TYPE (type), arg),
1980 fold_convert_loc (loc, TREE_TYPE (type),
1981 integer_zero_node));
1986 if (TREE_CODE (arg) == COMPLEX_EXPR)
1988 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1989 TREE_OPERAND (arg, 0));
1990 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1991 TREE_OPERAND (arg, 1));
1992 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1995 arg = save_expr (arg);
1996 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1997 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1998 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1999 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2000 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2008 if (integer_zerop (arg))
2009 return build_zero_vector (type);
2010 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2011 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2012 || TREE_CODE (orig) == VECTOR_TYPE);
2013 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2016 tem = fold_ignored_result (arg);
2017 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2023 protected_set_expr_location_unshare (tem, loc);
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */
2031 maybe_lvalue_p (const_tree x)
2033 /* We only need to wrap lvalue tree codes. */
2034 switch (TREE_CODE (x))
2047 case ARRAY_RANGE_REF:
2053 case PREINCREMENT_EXPR:
2054 case PREDECREMENT_EXPR:
2056 case TRY_CATCH_EXPR:
2057 case WITH_CLEANUP_EXPR:
2066 /* Assume the worst for front-end tree codes. */
2067 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2075 /* Return an expr equal to X but certainly not valid as an lvalue. */
2078 non_lvalue_loc (location_t loc, tree x)
2080 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2085 if (! maybe_lvalue_p (x))
2087 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2090 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2091 Zero means allow extended lvalues. */
2093 int pedantic_lvalues;
2095 /* When pedantic, return an expr equal to X but certainly not valid as a
2096 pedantic lvalue. Otherwise, return X. */
2099 pedantic_non_lvalue_loc (location_t loc, tree x)
2101 if (pedantic_lvalues)
2102 return non_lvalue_loc (loc, x);
2104 return protected_set_expr_location_unshare (x, loc);
2107 /* Given a tree comparison code, return the code that is the logical inverse
2108 of the given code. It is not safe to do this for floating-point
2109 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2110 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2113 invert_tree_comparison (enum tree_code code, bool honor_nans)
2115 if (honor_nans && flag_trapping_math)
2125 return honor_nans ? UNLE_EXPR : LE_EXPR;
2127 return honor_nans ? UNLT_EXPR : LT_EXPR;
2129 return honor_nans ? UNGE_EXPR : GE_EXPR;
2131 return honor_nans ? UNGT_EXPR : GT_EXPR;
2145 return UNORDERED_EXPR;
2146 case UNORDERED_EXPR:
2147 return ORDERED_EXPR;
2153 /* Similar, but return the comparison that results if the operands are
2154 swapped. This is safe for floating-point. */
2157 swap_tree_comparison (enum tree_code code)
2164 case UNORDERED_EXPR:
2190 /* Convert a comparison tree code from an enum tree_code representation
2191 into a compcode bit-based encoding. This function is the inverse of
2192 compcode_to_comparison. */
2194 static enum comparison_code
2195 comparison_to_compcode (enum tree_code code)
2212 return COMPCODE_ORD;
2213 case UNORDERED_EXPR:
2214 return COMPCODE_UNORD;
2216 return COMPCODE_UNLT;
2218 return COMPCODE_UNEQ;
2220 return COMPCODE_UNLE;
2222 return COMPCODE_UNGT;
2224 return COMPCODE_LTGT;
2226 return COMPCODE_UNGE;
2232 /* Convert a compcode bit-based encoding of a comparison operator back
2233 to GCC's enum tree_code representation. This function is the
2234 inverse of comparison_to_compcode. */
2236 static enum tree_code
2237 compcode_to_comparison (enum comparison_code code)
2254 return ORDERED_EXPR;
2255 case COMPCODE_UNORD:
2256 return UNORDERED_EXPR;
2274 /* Return a tree for the comparison which is the combination of
2275 doing the AND or OR (depending on CODE) of the two operations LCODE
2276 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2277 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2278 if this makes the transformation invalid. */
2281 combine_comparisons (location_t loc,
2282 enum tree_code code, enum tree_code lcode,
2283 enum tree_code rcode, tree truth_type,
2284 tree ll_arg, tree lr_arg)
2286 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2287 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2288 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2293 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2294 compcode = lcompcode & rcompcode;
2297 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2298 compcode = lcompcode | rcompcode;
2307 /* Eliminate unordered comparisons, as well as LTGT and ORD
2308 which are not used unless the mode has NaNs. */
2309 compcode &= ~COMPCODE_UNORD;
2310 if (compcode == COMPCODE_LTGT)
2311 compcode = COMPCODE_NE;
2312 else if (compcode == COMPCODE_ORD)
2313 compcode = COMPCODE_TRUE;
2315 else if (flag_trapping_math)
2317 /* Check that the original operation and the optimized ones will trap
2318 under the same condition. */
2319 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2320 && (lcompcode != COMPCODE_EQ)
2321 && (lcompcode != COMPCODE_ORD);
2322 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2323 && (rcompcode != COMPCODE_EQ)
2324 && (rcompcode != COMPCODE_ORD);
2325 bool trap = (compcode & COMPCODE_UNORD) == 0
2326 && (compcode != COMPCODE_EQ)
2327 && (compcode != COMPCODE_ORD);
2329 /* In a short-circuited boolean expression the LHS might be
2330 such that the RHS, if evaluated, will never trap. For
2331 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2332 if neither x nor y is NaN. (This is a mixed blessing: for
2333 example, the expression above will never trap, hence
2334 optimizing it to x < y would be invalid). */
2335 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2336 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2339 /* If the comparison was short-circuited, and only the RHS
2340 trapped, we may now generate a spurious trap. */
2342 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2345 /* If we changed the conditions that cause a trap, we lose. */
2346 if ((ltrap || rtrap) != trap)
2350 if (compcode == COMPCODE_TRUE)
2351 return constant_boolean_node (true, truth_type);
2352 else if (compcode == COMPCODE_FALSE)
2353 return constant_boolean_node (false, truth_type);
2356 enum tree_code tcode;
2358 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2359 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2363 /* Return nonzero if two operands (typically of the same tree node)
2364 are necessarily equal. If either argument has side-effects this
2365 function returns zero. FLAGS modifies behavior as follows:
2367 If OEP_ONLY_CONST is set, only return nonzero for constants.
2368 This function tests whether the operands are indistinguishable;
2369 it does not test whether they are equal using C's == operation.
2370 The distinction is important for IEEE floating point, because
2371 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2372 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2374 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2375 even though it may hold multiple values during a function.
2376 This is because a GCC tree node guarantees that nothing else is
2377 executed between the evaluation of its "operands" (which may often
2378 be evaluated in arbitrary order). Hence if the operands themselves
2379 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2380 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2381 unset means assuming isochronic (or instantaneous) tree equivalence.
2382 Unless comparing arbitrary expression trees, such as from different
2383 statements, this flag can usually be left unset.
2385 If OEP_PURE_SAME is set, then pure functions with identical arguments
2386 are considered the same. It is used when the caller has other ways
2387 to ensure that global memory is unchanged in between. */
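/* For illustration (typical results, not a specification): with FLAGS == 0,
   a + b and b + a compare equal because PLUS_EXPR is commutative and the
   swapped operands match, while two syntactically identical calls to an
   ordinary (neither const nor pure) function are not treated as equal,
   since each call may have side effects.  */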
2390 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2392 /* If either is ERROR_MARK, they aren't equal. */
2393 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2394 || TREE_TYPE (arg0) == error_mark_node
2395 || TREE_TYPE (arg1) == error_mark_node)
2398 /* Similar, if either does not have a type (like a released SSA name),
2399 they aren't equal. */
2400 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2403 /* Check equality of integer constants before bailing out due to
2404 precision differences. */
2405 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2406 return tree_int_cst_equal (arg0, arg1);
2408 /* If both types don't have the same signedness, then we can't consider
2409 them equal. We must check this before the STRIP_NOPS calls
2410 because they may change the signedness of the arguments. As pointers
2411 strictly don't have a signedness, require either two pointers or
2412 two non-pointers as well. */
2413 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2414 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2417 /* We cannot consider pointers to different address space equal. */
2418 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2419 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2420 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2423 /* If both types don't have the same precision, then it is not safe to strip NOPs. */
2425 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2431 /* In case both args are comparisons but with different comparison
2432 code, try to swap the comparison operands of one arg to produce
2433 a match and compare that variant. */
2434 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2435 && COMPARISON_CLASS_P (arg0)
2436 && COMPARISON_CLASS_P (arg1))
2438 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2440 if (TREE_CODE (arg0) == swap_code)
2441 return operand_equal_p (TREE_OPERAND (arg0, 0),
2442 TREE_OPERAND (arg1, 1), flags)
2443 && operand_equal_p (TREE_OPERAND (arg0, 1),
2444 TREE_OPERAND (arg1, 0), flags);
2447 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2448 /* This is needed for conversions and for COMPONENT_REF.
2449 Might as well play it safe and always test this. */
2450 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2451 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2452 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2455 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2456 We don't care about side effects in that case because the SAVE_EXPR
2457 takes care of that for us. In all other cases, two expressions are
2458 equal if they have no side effects. If we have two identical
2459 expressions with side effects that should be treated the same due
2460 to the only side effects being identical SAVE_EXPR's, that will
2461 be detected in the recursive calls below.
2462 If we are taking an invariant address of two identical objects
2463 they are necessarily equal as well. */
2464 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2465 && (TREE_CODE (arg0) == SAVE_EXPR
2466 || (flags & OEP_CONSTANT_ADDRESS_OF)
2467 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2470 /* Next handle constant cases, those for which we can return 1 even
2471 if ONLY_CONST is set. */
2472 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2473 switch (TREE_CODE (arg0))
2476 return tree_int_cst_equal (arg0, arg1);
2479 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2480 TREE_FIXED_CST (arg1));
2483 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2484 TREE_REAL_CST (arg1)))
2488 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2490 /* If we do not distinguish between signed and unsigned zero,
2491 consider them equal. */
2492 if (real_zerop (arg0) && real_zerop (arg1))
2501 v1 = TREE_VECTOR_CST_ELTS (arg0);
2502 v2 = TREE_VECTOR_CST_ELTS (arg1);
2505 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2508 v1 = TREE_CHAIN (v1);
2509 v2 = TREE_CHAIN (v2);
2516 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2518 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2522 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2523 && ! memcmp (TREE_STRING_POINTER (arg0),
2524 TREE_STRING_POINTER (arg1),
2525 TREE_STRING_LENGTH (arg0)));
2528 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2529 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2530 ? OEP_CONSTANT_ADDRESS_OF : 0);
2535 if (flags & OEP_ONLY_CONST)
2538 /* Define macros to test an operand from arg0 and arg1 for equality and a
2539 variant that allows null and views null as being different from any
2540 non-null value. In the latter case, if either is null, both
2541 must be; otherwise, do the normal comparison. */
2542 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2543 TREE_OPERAND (arg1, N), flags)
2545 #define OP_SAME_WITH_NULL(N) \
2546 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2547 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2549 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2552 /* Two conversions are equal only if signedness and modes match. */
2553 switch (TREE_CODE (arg0))
2556 case FIX_TRUNC_EXPR:
2557 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2558 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2568 case tcc_comparison:
2570 if (OP_SAME (0) && OP_SAME (1))
2573 /* For commutative ops, allow the other order. */
2574 return (commutative_tree_code (TREE_CODE (arg0))
2575 && operand_equal_p (TREE_OPERAND (arg0, 0),
2576 TREE_OPERAND (arg1, 1), flags)
2577 && operand_equal_p (TREE_OPERAND (arg0, 1),
2578 TREE_OPERAND (arg1, 0), flags));
2581 /* If either of the pointer (or reference) expressions we are
2582 dereferencing contain a side effect, these cannot be equal. */
2583 if (TREE_SIDE_EFFECTS (arg0)
2584 || TREE_SIDE_EFFECTS (arg1))
2587 switch (TREE_CODE (arg0))
2595 /* Require equal access sizes, and similar pointer types.
2596 We can have incomplete types for array references of
2597 variable-sized arrays from the Fortran frontend. */
2599 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2600 || (TYPE_SIZE (TREE_TYPE (arg0))
2601 && TYPE_SIZE (TREE_TYPE (arg1))
2602 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2603 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2604 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2605 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2606 && OP_SAME (0) && OP_SAME (1));
2609 case ARRAY_RANGE_REF:
2610 /* Operands 2 and 3 may be null.
2611 Compare the array index by value if it is constant first as we
2612 may have different types but same value here. */
2614 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2615 TREE_OPERAND (arg1, 1))
2617 && OP_SAME_WITH_NULL (2)
2618 && OP_SAME_WITH_NULL (3));
2621 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2622 may be NULL when we're called to compare MEM_EXPRs. */
2623 return OP_SAME_WITH_NULL (0)
2625 && OP_SAME_WITH_NULL (2);
2628 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2634 case tcc_expression:
2635 switch (TREE_CODE (arg0))
2638 case TRUTH_NOT_EXPR:
2641 case TRUTH_ANDIF_EXPR:
2642 case TRUTH_ORIF_EXPR:
2643 return OP_SAME (0) && OP_SAME (1);
2646 case WIDEN_MULT_PLUS_EXPR:
2647 case WIDEN_MULT_MINUS_EXPR:
2650 /* The multiplication operands are commutative. */
2653 case TRUTH_AND_EXPR:
2655 case TRUTH_XOR_EXPR:
2656 if (OP_SAME (0) && OP_SAME (1))
2659 /* Otherwise take into account this is a commutative operation. */
2660 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2661 TREE_OPERAND (arg1, 1), flags)
2662 && operand_equal_p (TREE_OPERAND (arg0, 1),
2663 TREE_OPERAND (arg1, 0), flags));
2668 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2675 switch (TREE_CODE (arg0))
2678 /* If the CALL_EXPRs call different functions, then they
2679 clearly can not be equal. */
2680 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2685 unsigned int cef = call_expr_flags (arg0);
2686 if (flags & OEP_PURE_SAME)
2687 cef &= ECF_CONST | ECF_PURE;
2694 /* Now see if all the arguments are the same. */
2696 const_call_expr_arg_iterator iter0, iter1;
2698 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2699 a1 = first_const_call_expr_arg (arg1, &iter1);
2701 a0 = next_const_call_expr_arg (&iter0),
2702 a1 = next_const_call_expr_arg (&iter1))
2703 if (! operand_equal_p (a0, a1, flags))
2706 /* If we get here and both argument lists are exhausted
2707 then the CALL_EXPRs are equal. */
2708 return ! (a0 || a1);
2714 case tcc_declaration:
2715 /* Consider __builtin_sqrt equal to sqrt. */
2716 return (TREE_CODE (arg0) == FUNCTION_DECL
2717 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2718 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2719 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2726 #undef OP_SAME_WITH_NULL
2729 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2730 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2732 When in doubt, return 0. */
2735 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2737 int unsignedp1, unsignedpo;
2738 tree primarg0, primarg1, primother;
2739 unsigned int correct_width;
2741 if (operand_equal_p (arg0, arg1, 0))
2744 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2745 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2748 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2749 and see if the inner values are the same. This removes any
2750 signedness comparison, which doesn't matter here. */
2751 primarg0 = arg0, primarg1 = arg1;
2752 STRIP_NOPS (primarg0);
2753 STRIP_NOPS (primarg1);
2754 if (operand_equal_p (primarg0, primarg1, 0))
2757 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2758 actual comparison operand, ARG0.
2760 First throw away any conversions to wider types
2761 already present in the operands. */
2763 primarg1 = get_narrower (arg1, &unsignedp1);
2764 primother = get_narrower (other, &unsignedpo);
2766 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2767 if (unsignedp1 == unsignedpo
2768 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2769 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2771 tree type = TREE_TYPE (arg0);
2773 /* Make sure shorter operand is extended the right way
2774 to match the longer operand. */
2775 primarg1 = fold_convert (signed_or_unsigned_type_for
2776 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2778 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2785 /* See if ARG is an expression that is either a comparison or is performing
2786 arithmetic on comparisons. The comparisons must only be comparing
2787 two different values, which will be stored in *CVAL1 and *CVAL2; if
2788 they are nonzero it means that some operands have already been found.
2789 No variables may be used anywhere else in the expression except in the
2790 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2791 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2793 If this is true, return 1. Otherwise, return zero. */
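/* For illustration: for ARG = (a < b) | (a == b) the walk below finds only
   the values a and b inside comparisons, so it returns 1 with *CVAL1 == a
   and *CVAL2 == b; an expression such as (a < b) + c uses c outside of any
   comparison and therefore makes the function return 0.  */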
2796 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2798 enum tree_code code = TREE_CODE (arg);
2799 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2801 /* We can handle some of the tcc_expression cases here. */
2802 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2804 else if (tclass == tcc_expression
2805 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2806 || code == COMPOUND_EXPR))
2807 tclass = tcc_binary;
2809 else if (tclass == tcc_expression && code == SAVE_EXPR
2810 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2812 /* If we've already found a CVAL1 or CVAL2, this expression is
2813 too complex to handle. */
2814 if (*cval1 || *cval2)
2824 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2827 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2828 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2829 cval1, cval2, save_p));
2834 case tcc_expression:
2835 if (code == COND_EXPR)
2836 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2837 cval1, cval2, save_p)
2838 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2839 cval1, cval2, save_p)
2840 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2841 cval1, cval2, save_p));
2844 case tcc_comparison:
2845 /* First see if we can handle the first operand, then the second. For
2846 the second operand, we know *CVAL1 can't be zero. It must be that
2847 one side of the comparison is each of the values; test for the
2848 case where this isn't true by failing if the two operands are the same. */
2851 if (operand_equal_p (TREE_OPERAND (arg, 0),
2852 TREE_OPERAND (arg, 1), 0))
2856 *cval1 = TREE_OPERAND (arg, 0);
2857 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2859 else if (*cval2 == 0)
2860 *cval2 = TREE_OPERAND (arg, 0);
2861 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2866 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2868 else if (*cval2 == 0)
2869 *cval2 = TREE_OPERAND (arg, 1);
2870 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2882 /* ARG is a tree that is known to contain just arithmetic operations and
2883 comparisons. Evaluate the operations in the tree substituting NEW0 for
2884 any occurrence of OLD0 as an operand of a comparison and likewise for NEW1 and OLD1. */
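/* For illustration: with OLD0 == a, NEW0 == 0, OLD1 == b and NEW1 == 1, the
   expression (a < b) | (b == a) is rebuilt as (0 < 1) | (1 == 0), which
   subsequent folding can reduce to a constant.  */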
2888 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2889 tree old1, tree new1)
2891 tree type = TREE_TYPE (arg);
2892 enum tree_code code = TREE_CODE (arg);
2893 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2895 /* We can handle some of the tcc_expression cases here. */
2896 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2898 else if (tclass == tcc_expression
2899 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2900 tclass = tcc_binary;
2905 return fold_build1_loc (loc, code, type,
2906 eval_subst (loc, TREE_OPERAND (arg, 0),
2907 old0, new0, old1, new1));
2910 return fold_build2_loc (loc, code, type,
2911 eval_subst (loc, TREE_OPERAND (arg, 0),
2912 old0, new0, old1, new1),
2913 eval_subst (loc, TREE_OPERAND (arg, 1),
2914 old0, new0, old1, new1));
2916 case tcc_expression:
2920 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2924 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2928 return fold_build3_loc (loc, code, type,
2929 eval_subst (loc, TREE_OPERAND (arg, 0),
2930 old0, new0, old1, new1),
2931 eval_subst (loc, TREE_OPERAND (arg, 1),
2932 old0, new0, old1, new1),
2933 eval_subst (loc, TREE_OPERAND (arg, 2),
2934 old0, new0, old1, new1));
2938 /* Fall through - ??? */
2940 case tcc_comparison:
2942 tree arg0 = TREE_OPERAND (arg, 0);
2943 tree arg1 = TREE_OPERAND (arg, 1);
2945 /* We need to check both for exact equality and tree equality. The
2946 former will be true if the operand has a side-effect. In that
2947 case, we know the operand occurred exactly once. */
2949 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2951 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2954 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2956 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2959 return fold_build2_loc (loc, code, type, arg0, arg1);
2967 /* Return a tree for the case when the result of an expression is RESULT
2968 converted to TYPE and OMITTED was previously an operand of the expression
2969 but is now not needed (e.g., we folded OMITTED * 0).
2971 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2972 the conversion of RESULT to TYPE. */
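/* For illustration (a sketch, not the exact trees built): when folding
   f () * 0 the multiplication goes away but the call must still run, so the
   result is the equivalent of the C expression (f (), 0); if the omitted
   operand has no side effects, RESULT is simply converted to TYPE and
   returned.  */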
2975 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2977 tree t = fold_convert_loc (loc, type, result);
2979 /* If the resulting operand is an empty statement, just return the omitted
2980 statement casted to void. */
2981 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2982 return build1_loc (loc, NOP_EXPR, void_type_node,
2983 fold_ignored_result (omitted));
2985 if (TREE_SIDE_EFFECTS (omitted))
2986 return build2_loc (loc, COMPOUND_EXPR, type,
2987 fold_ignored_result (omitted), t);
2989 return non_lvalue_loc (loc, t);
2992 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2995 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2998 tree t = fold_convert_loc (loc, type, result);
3000 /* If the resulting operand is an empty statement, just return the omitted
3001 statement casted to void. */
3002 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3003 return build1_loc (loc, NOP_EXPR, void_type_node,
3004 fold_ignored_result (omitted));
3006 if (TREE_SIDE_EFFECTS (omitted))
3007 return build2_loc (loc, COMPOUND_EXPR, type,
3008 fold_ignored_result (omitted), t);
3010 return pedantic_non_lvalue_loc (loc, t);
3013 /* Return a tree for the case when the result of an expression is RESULT
3014 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3015 of the expression but are now not needed.
3017 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3018 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3019 evaluated before OMITTED2. Otherwise, if neither has side effects,
3020 just do the conversion of RESULT to TYPE. */
3023 omit_two_operands_loc (location_t loc, tree type, tree result,
3024 tree omitted1, tree omitted2)
3026 tree t = fold_convert_loc (loc, type, result);
3028 if (TREE_SIDE_EFFECTS (omitted2))
3029 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3030 if (TREE_SIDE_EFFECTS (omitted1))
3031 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3033 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3037 /* Return a simplified tree node for the truth-negation of ARG. This
3038 never alters ARG itself. We assume that ARG is an operation that
3039 returns a truth value (0 or 1).
3041 FIXME: one would think we would fold the result, but it causes
3042 problems with the dominator optimizer. */
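/* For illustration, typical simplifications produced below are

     !(a && b)   ->   !a || !b
     !(x < y)    ->   x >= y     (only when inverting the comparison is
                                   safe; see the floating-point check)
     !!t         ->   t

   and NULL_TREE is returned when no simplification applies.  */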
3045 fold_truth_not_expr (location_t loc, tree arg)
3047 tree type = TREE_TYPE (arg);
3048 enum tree_code code = TREE_CODE (arg);
3049 location_t loc1, loc2;
3051 /* If this is a comparison, we can simply invert it, except for
3052 floating-point non-equality comparisons, in which case we just
3053 enclose a TRUTH_NOT_EXPR around what we have. */
3055 if (TREE_CODE_CLASS (code) == tcc_comparison)
3057 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3058 if (FLOAT_TYPE_P (op_type)
3059 && flag_trapping_math
3060 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3061 && code != NE_EXPR && code != EQ_EXPR)
3064 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3065 if (code == ERROR_MARK)
3068 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3069 TREE_OPERAND (arg, 1));
3075 return constant_boolean_node (integer_zerop (arg), type);
3077 case TRUTH_AND_EXPR:
3078 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3079 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3080 return build2_loc (loc, TRUTH_OR_EXPR, type,
3081 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3082 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3085 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3086 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3087 return build2_loc (loc, TRUTH_AND_EXPR, type,
3088 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3089 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3091 case TRUTH_XOR_EXPR:
3092 /* Here we can invert either operand. We invert the first operand
3093 unless the second operand is a TRUTH_NOT_EXPR in which case our
3094 result is the XOR of the first operand with the inside of the
3095 negation of the second operand. */
3097 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3098 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3099 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3101 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3102 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3103 TREE_OPERAND (arg, 1));
3105 case TRUTH_ANDIF_EXPR:
3106 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3107 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3108 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3109 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3110 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3112 case TRUTH_ORIF_EXPR:
3113 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3114 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3115 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3116 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3117 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3119 case TRUTH_NOT_EXPR:
3120 return TREE_OPERAND (arg, 0);
3124 tree arg1 = TREE_OPERAND (arg, 1);
3125 tree arg2 = TREE_OPERAND (arg, 2);
3127 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3128 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3130 /* A COND_EXPR may have a throw as one operand, which
3131 then has void type. Just leave void operands as they are. */
3133 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3134 VOID_TYPE_P (TREE_TYPE (arg1))
3135 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3136 VOID_TYPE_P (TREE_TYPE (arg2))
3137 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3141 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3142 return build2_loc (loc, COMPOUND_EXPR, type,
3143 TREE_OPERAND (arg, 0),
3144 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3146 case NON_LVALUE_EXPR:
3147 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3148 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3151 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3152 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3154 /* ... fall through ... */
3157 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3158 return build1_loc (loc, TREE_CODE (arg), type,
3159 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3162 if (!integer_onep (TREE_OPERAND (arg, 1)))
3164 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3167 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3169 case CLEANUP_POINT_EXPR:
3170 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3171 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3172 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3179 /* Return a simplified tree node for the truth-negation of ARG. This
3180 never alters ARG itself. We assume that ARG is an operation that
3181 returns a truth value (0 or 1).
3183 FIXME: one would think we would fold the result, but it causes
3184 problems with the dominator optimizer. */
3187 invert_truthvalue_loc (location_t loc, tree arg)
3191 if (TREE_CODE (arg) == ERROR_MARK)
3194 tem = fold_truth_not_expr (loc, arg);
3196 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3201 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3202 operands are another bit-wise operation with a common input. If so,
3203 distribute the bit operations to save an operation and possibly two if
3204 constants are involved. For example, convert
3205 (A | B) & (A | C) into A | (B & C)
3206 Further simplification will occur if B and C are constants.
3208 If this optimization cannot be done, 0 will be returned. */
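/* For illustration, with constants involved the transformation gives

     (x | 3) & (x | 5)   ->   x | (3 & 5)   ->   x | 1

   saving one bit operation now and another once the constant folds.  */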
3211 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3212 tree arg0, tree arg1)
3217 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3218 || TREE_CODE (arg0) == code
3219 || (TREE_CODE (arg0) != BIT_AND_EXPR
3220 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3223 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3225 common = TREE_OPERAND (arg0, 0);
3226 left = TREE_OPERAND (arg0, 1);
3227 right = TREE_OPERAND (arg1, 1);
3229 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3231 common = TREE_OPERAND (arg0, 0);
3232 left = TREE_OPERAND (arg0, 1);
3233 right = TREE_OPERAND (arg1, 0);
3235 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3237 common = TREE_OPERAND (arg0, 1);
3238 left = TREE_OPERAND (arg0, 0);
3239 right = TREE_OPERAND (arg1, 1);
3241 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3243 common = TREE_OPERAND (arg0, 1);
3244 left = TREE_OPERAND (arg0, 0);
3245 right = TREE_OPERAND (arg1, 0);
3250 common = fold_convert_loc (loc, type, common);
3251 left = fold_convert_loc (loc, type, left);
3252 right = fold_convert_loc (loc, type, right);
3253 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3254 fold_build2_loc (loc, code, type, left, right));
3257 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3258 with code CODE. This optimization is unsafe. */
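/* For illustration (and to show why this is unsafe), the folds below are

     (a / c) + (b / c)       ->   (a + b) / c
     (a / 2.0) - (a / 4.0)   ->   a * (0.5 - 0.25)   ->   a * 0.25

   which can change rounding and exception behavior, hence the caveat
   above.  */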
3260 distribute_real_division (location_t loc, enum tree_code code, tree type,
3261 tree arg0, tree arg1)
3263 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3264 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3266 /* (A / C) +- (B / C) -> (A +- B) / C. */
3268 && operand_equal_p (TREE_OPERAND (arg0, 1),
3269 TREE_OPERAND (arg1, 1), 0))
3270 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3271 fold_build2_loc (loc, code, type,
3272 TREE_OPERAND (arg0, 0),
3273 TREE_OPERAND (arg1, 0)),
3274 TREE_OPERAND (arg0, 1));
3276 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3277 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3278 TREE_OPERAND (arg1, 0), 0)
3279 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3280 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3282 REAL_VALUE_TYPE r0, r1;
3283 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3284 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3286 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3288 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3289 real_arithmetic (&r0, code, &r0, &r1);
3290 return fold_build2_loc (loc, MULT_EXPR, type,
3291 TREE_OPERAND (arg0, 0),
3292 build_real (type, r0));
3298 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3299 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3302 make_bit_field_ref (location_t loc, tree inner, tree type,
3303 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3305 tree result, bftype;
3309 tree size = TYPE_SIZE (TREE_TYPE (inner));
3310 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3311 || POINTER_TYPE_P (TREE_TYPE (inner)))
3312 && host_integerp (size, 0)
3313 && tree_low_cst (size, 0) == bitsize)
3314 return fold_convert_loc (loc, type, inner);
3318 if (TYPE_PRECISION (bftype) != bitsize
3319 || TYPE_UNSIGNED (bftype) == !unsignedp)
3320 bftype = build_nonstandard_integer_type (bitsize, 0);
3322 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3323 size_int (bitsize), bitsize_int (bitpos));
3326 result = fold_convert_loc (loc, type, result);
3331 /* Optimize a bit-field compare.
3333 There are two cases: First is a compare against a constant and the
3334 second is a comparison of two items where the fields are at the same
3335 bit position relative to the start of a chunk (byte, halfword, word)
3336 large enough to contain it. In these cases we can avoid the shift
3337 implicit in bitfield extractions.
3339 For constants, we emit a compare of the shifted constant with the
3340 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3341 compared. For two fields at the same position, we do the ANDs with the
3342 similar mask and compare the result of the ANDs.
3344 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3345 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3346 are the left and right operands of the comparison, respectively.
3348 If the optimization described above can be done, we return the resulting
3349 tree. Otherwise we return zero. */
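/* For illustration (a rough sketch of the constant case): given

     struct s { unsigned a : 3; unsigned b : 9; } x;
     ... x.b == 5 ...

   the field can be read as part of a suitably aligned chunk, so the test
   becomes roughly (chunk & mask) == (5 << bitpos), avoiding the shift that
   a plain bit-field extraction would need.  */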
3352 optimize_bit_field_compare (location_t loc, enum tree_code code,
3353 tree compare_type, tree lhs, tree rhs)
3355 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3356 tree type = TREE_TYPE (lhs);
3357 tree signed_type, unsigned_type;
3358 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3359 enum machine_mode lmode, rmode, nmode;
3360 int lunsignedp, runsignedp;
3361 int lvolatilep = 0, rvolatilep = 0;
3362 tree linner, rinner = NULL_TREE;
3366 /* Get all the information about the extractions being done. If the bit size
3367 is the same as the size of the underlying object, we aren't doing an
3368 extraction at all and so can do nothing. We also don't want to
3369 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3370 then will no longer be able to replace it. */
3371 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3372 &lunsignedp, &lvolatilep, false);
3373 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3374 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3379 /* If this is not a constant, we can only do something if bit positions,
3380 sizes, and signedness are the same. */
3381 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3382 &runsignedp, &rvolatilep, false);
3384 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3385 || lunsignedp != runsignedp || offset != 0
3386 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3390 /* See if we can find a mode to refer to this field. We should be able to,
3391 but fail if we can't. */
3393 && GET_MODE_BITSIZE (lmode) > 0
3394 && flag_strict_volatile_bitfields > 0)
3397 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3398 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3399 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3400 TYPE_ALIGN (TREE_TYPE (rinner))),
3401 word_mode, lvolatilep || rvolatilep);
3402 if (nmode == VOIDmode)
3405 /* Set signed and unsigned types of the precision of this mode for the shifts below. */
3407 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3408 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3410 /* Compute the bit position and size for the new reference and our offset
3411 within it. If the new reference is the same size as the original, we
3412 won't optimize anything, so return zero. */
3413 nbitsize = GET_MODE_BITSIZE (nmode);
3414 nbitpos = lbitpos & ~ (nbitsize - 1);
3416 if (nbitsize == lbitsize)
3419 if (BYTES_BIG_ENDIAN)
3420 lbitpos = nbitsize - lbitsize - lbitpos;
3422 /* Make the mask to be used against the extracted field. */
3423 mask = build_int_cst_type (unsigned_type, -1);
3424 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3425 mask = const_binop (RSHIFT_EXPR, mask,
3426 size_int (nbitsize - lbitsize - lbitpos));
3429 /* If not comparing with constant, just rework the comparison and return. */
3431 return fold_build2_loc (loc, code, compare_type,
3432 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3433 make_bit_field_ref (loc, linner,
3438 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3439 make_bit_field_ref (loc, rinner,
3445 /* Otherwise, we are handling the constant case. See if the constant is too
3446 big for the field. Warn and return a tree for 0 (false) if so. We do
3447 this not only for its own sake, but to avoid having to test for this
3448 error case below. If we didn't, we might generate wrong code.
3450 For unsigned fields, the constant shifted right by the field length should
3451 be all zero. For signed fields, the high-order bits should agree with the sign bit. */
3456 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3457 fold_convert_loc (loc,
3458 unsigned_type, rhs),
3459 size_int (lbitsize))))
3461 warning (0, "comparison is always %d due to width of bit-field",
3463 return constant_boolean_node (code == NE_EXPR, compare_type);
3468 tree tem = const_binop (RSHIFT_EXPR,
3469 fold_convert_loc (loc, signed_type, rhs),
3470 size_int (lbitsize - 1));
3471 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3473 warning (0, "comparison is always %d due to width of bit-field",
3475 return constant_boolean_node (code == NE_EXPR, compare_type);
3479 /* Single-bit compares should always be against zero. */
3480 if (lbitsize == 1 && ! integer_zerop (rhs))
3482 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3483 rhs = build_int_cst (type, 0);
3486 /* Make a new bitfield reference, shift the constant over the
3487 appropriate number of bits and mask it with the computed mask
3488 (in case this was a signed field). If we changed it, make a new one. */
3489 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3492 TREE_SIDE_EFFECTS (lhs) = 1;
3493 TREE_THIS_VOLATILE (lhs) = 1;
3496 rhs = const_binop (BIT_AND_EXPR,
3497 const_binop (LSHIFT_EXPR,
3498 fold_convert_loc (loc, unsigned_type, rhs),
3499 size_int (lbitpos)),
3502 lhs = build2_loc (loc, code, compare_type,
3503 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3507 /* Subroutine for fold_truthop: decode a field reference.
3509 If EXP is a comparison reference, we return the innermost reference.
3511 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3512 set to the starting bit number.
3514 If the innermost field can be completely contained in a mode-sized
3515 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3517 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3518 otherwise it is not changed.
3520 *PUNSIGNEDP is set to the signedness of the field.
3522 *PMASK is set to the mask used. This is either contained in a
3523 BIT_AND_EXPR or derived from the width of the field.
3525 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3527 Return 0 if this is not a component reference or is one that we can't
3528 do anything with. */
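/* For illustration: for EXP = s.f & 0x3f, where s.f is an integral
   bit-field, the BIT_AND_EXPR is stripped, 0x3f is stored in *PAND_MASK,
   the containing object is returned, and *PMASK is the AND of 0x3f with
   the all-ones mask of the field's width.  */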
3531 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3532 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3533 int *punsignedp, int *pvolatilep,
3534 tree *pmask, tree *pand_mask)
3536 tree outer_type = 0;
3538 tree mask, inner, offset;
3540 unsigned int precision;
3542 /* All the optimizations using this function assume integer fields.
3543 There are problems with FP fields since the type_for_size call
3544 below can fail for, e.g., XFmode. */
3545 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3548 /* We are interested in the bare arrangement of bits, so strip everything
3549 that doesn't affect the machine mode. However, record the type of the
3550 outermost expression if it may matter below. */
3551 if (CONVERT_EXPR_P (exp)
3552 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3553 outer_type = TREE_TYPE (exp);
3556 if (TREE_CODE (exp) == BIT_AND_EXPR)
3558 and_mask = TREE_OPERAND (exp, 1);
3559 exp = TREE_OPERAND (exp, 0);
3560 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3561 if (TREE_CODE (and_mask) != INTEGER_CST)
3565 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3566 punsignedp, pvolatilep, false);
3567 if ((inner == exp && and_mask == 0)
3568 || *pbitsize < 0 || offset != 0
3569 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3572 /* If the number of bits in the reference is the same as the bitsize of
3573 the outer type, then the outer type gives the signedness. Otherwise
3574 (in case of a small bitfield) the signedness is unchanged. */
3575 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3576 *punsignedp = TYPE_UNSIGNED (outer_type);
3578 /* Compute the mask to access the bitfield. */
3579 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3580 precision = TYPE_PRECISION (unsigned_type);
3582 mask = build_int_cst_type (unsigned_type, -1);
3584 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3585 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3587 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3589 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3590 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3593 *pand_mask = and_mask;
3597 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order bit positions. */
3601 all_ones_mask_p (const_tree mask, int size)
3603 tree type = TREE_TYPE (mask);
3604 unsigned int precision = TYPE_PRECISION (type);
3607 tmask = build_int_cst_type (signed_type_for (type), -1);
3610 tree_int_cst_equal (mask,
3611 const_binop (RSHIFT_EXPR,
3612 const_binop (LSHIFT_EXPR, tmask,
3613 size_int (precision - size)),
3614 size_int (precision - size)));
3617 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3618 represents the sign bit of EXP's type. If EXP represents a sign
3619 or zero extension, also test VAL against the unextended type.
3620 The return value is the (sub)expression whose sign bit is VAL,
3621 or NULL_TREE otherwise. */
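/* For illustration: for a 32-bit signed EXP the matching VAL is 0x80000000;
   if EXP is (int) c where c has an 8-bit type, VAL == 0x80 can also be
   accepted through the recursive call on the narrower operand.  */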
3624 sign_bit_p (tree exp, const_tree val)
3626 unsigned HOST_WIDE_INT mask_lo, lo;
3627 HOST_WIDE_INT mask_hi, hi;
3631 /* Tree EXP must have an integral type. */
3632 t = TREE_TYPE (exp);
3633 if (! INTEGRAL_TYPE_P (t))
3636 /* Tree VAL must be an integer constant. */
3637 if (TREE_CODE (val) != INTEGER_CST
3638 || TREE_OVERFLOW (val))
3641 width = TYPE_PRECISION (t);
3642 if (width > HOST_BITS_PER_WIDE_INT)
3644 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3647 mask_hi = ((unsigned HOST_WIDE_INT) -1
3648 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3654 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3657 mask_lo = ((unsigned HOST_WIDE_INT) -1
3658 >> (HOST_BITS_PER_WIDE_INT - width));
3661 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3662 treat VAL as if it were unsigned. */
3663 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3664 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3667 /* Handle extension from a narrower type. */
3668 if (TREE_CODE (exp) == NOP_EXPR
3669 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3670 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3675 /* Subroutine for fold_truthop: determine if an operand is simple enough
3676 to be evaluated unconditionally. */
3679 simple_operand_p (const_tree exp)
3681 /* Strip any conversions that don't change the machine mode. */
3684 return (CONSTANT_CLASS_P (exp)
3685 || TREE_CODE (exp) == SSA_NAME
3687 && ! TREE_ADDRESSABLE (exp)
3688 && ! TREE_THIS_VOLATILE (exp)
3689 && ! DECL_NONLOCAL (exp)
3690 /* Don't regard global variables as simple. They may be
3691 allocated in ways unknown to the compiler (shared memory,
3692 #pragma weak, etc). */
3693 && ! TREE_PUBLIC (exp)
3694 && ! DECL_EXTERNAL (exp)
3695 /* Loading a static variable is unduly expensive, but global
3696 registers aren't expensive. */
3697 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3700 /* The following functions are subroutines to fold_range_test and allow it to
3701 try to change a logical combination of comparisons into a range test.
3704 X == 2 || X == 3 || X == 4 || X == 5
3708 (unsigned) (X - 2) <= 3
3710 We describe each set of comparisons as being either inside or outside
3711 a range, using a variable named like IN_P, and then describe the
3712 range with a lower and upper bound. If one of the bounds is omitted,
3713 it represents either the highest or lowest value of the type.
3715 In the comments below, we represent a range by two numbers in brackets
3716 preceded by a "+" to designate being inside that range, or a "-" to
3717 designate being outside that range, so the condition can be inverted by
3718 flipping the prefix. An omitted bound is represented by a "-". For
3719 example, "- [-, 10]" means being outside the range starting at the lowest
3720 possible value and ending at 10, in other words, being greater than 10.
3721 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
3724 We set up things so that the missing bounds are handled in a consistent
3725 manner so neither a missing bound nor "true" and "false" need to be
3726 handled using a special case. */
3728 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3729 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3730 and UPPER1_P are nonzero if the respective argument is an upper bound
3731 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3732 must be specified for a comparison. ARG1 will be converted to ARG0's
3733 type if both are specified. */
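/* For illustration: range_binop (LE_EXPR, type, NULL_TREE, 0, c, 1) asks
   whether a missing lower bound (treated as minus infinity) is less than or
   equal to the upper bound c, which is always so, and therefore returns a
   constant true node of TYPE.  */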
3736 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3737 tree arg1, int upper1_p)
3743 /* If neither arg represents infinity, do the normal operation.
3744 Else, if not a comparison, return infinity. Else handle the special
3745 comparison rules. Note that most of the cases below won't occur, but
3746 are handled for consistency. */
3748 if (arg0 != 0 && arg1 != 0)
3750 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3751 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3753 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3756 if (TREE_CODE_CLASS (code) != tcc_comparison)
3759 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3760 for neither. In real maths, we cannot assume open ended ranges are
3761 the same. But, this is computer arithmetic, where numbers are finite.
3762 We can therefore make the transformation of any unbounded range with
3763 the value Z, Z being greater than any representable number. This permits
3764 us to treat unbounded ranges as equal. */
3765 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3766 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3770 result = sgn0 == sgn1;
3773 result = sgn0 != sgn1;
3776 result = sgn0 < sgn1;
3779 result = sgn0 <= sgn1;
3782 result = sgn0 > sgn1;
3785 result = sgn0 >= sgn1;
3791 return constant_boolean_node (result, type);
3794 /* Given EXP, a logical expression, set the range it is testing into
3795 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3796 actually being tested. *PLOW and *PHIGH will be made of the same
3797 type as the returned expression. If EXP is not a comparison, we
3798 will most likely not be returning a useful value and range. Set
3799 *STRICT_OVERFLOW_P to true if the return value is only valid
3800 because signed overflow is undefined; otherwise, do not change
3801 *STRICT_OVERFLOW_P. */
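/* For illustration: for EXP = (x > 5) with integral x, the routine returns
   x and describes the satisfying values as lying outside the range
   [minimum, 5]; for signed x, EXP = (x + 1 == 0) yields x with the
   single-point range [-1, -1], recording via *STRICT_OVERFLOW_P that this
   relies on signed overflow being undefined.  */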
3804 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3805 bool *strict_overflow_p)
3807 enum tree_code code;
3808 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3809 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3811 tree low, high, n_low, n_high;
3812 location_t loc = EXPR_LOCATION (exp);
3814 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3815 and see if we can refine the range. Some of the cases below may not
3816 happen, but it doesn't seem worth worrying about this. We "continue"
3817 the outer loop when we've changed something; otherwise we "break"
3818 the switch, which will "break" the while. */
3821 low = high = build_int_cst (TREE_TYPE (exp), 0);
3825 code = TREE_CODE (exp);
3826 exp_type = TREE_TYPE (exp);
3828 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3830 if (TREE_OPERAND_LENGTH (exp) > 0)
3831 arg0 = TREE_OPERAND (exp, 0);
3832 if (TREE_CODE_CLASS (code) == tcc_comparison
3833 || TREE_CODE_CLASS (code) == tcc_unary
3834 || TREE_CODE_CLASS (code) == tcc_binary)
3835 arg0_type = TREE_TYPE (arg0);
3836 if (TREE_CODE_CLASS (code) == tcc_binary
3837 || TREE_CODE_CLASS (code) == tcc_comparison
3838 || (TREE_CODE_CLASS (code) == tcc_expression
3839 && TREE_OPERAND_LENGTH (exp) > 1))
3840 arg1 = TREE_OPERAND (exp, 1);
3845 case TRUTH_NOT_EXPR:
3846 in_p = ! in_p, exp = arg0;
3849 case EQ_EXPR: case NE_EXPR:
3850 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3851 /* We can only do something if the range is testing for zero
3852 and if the second operand is an integer constant. Note that
3853 saying something is "in" the range we make is done by
3854 complementing IN_P since it will set in the initial case of
3855 being not equal to zero; "out" is leaving it alone. */
3856 if (low == 0 || high == 0
3857 || ! integer_zerop (low) || ! integer_zerop (high)
3858 || TREE_CODE (arg1) != INTEGER_CST)
3863 case NE_EXPR: /* - [c, c] */
3866 case EQ_EXPR: /* + [c, c] */
3867 in_p = ! in_p, low = high = arg1;
3869 case GT_EXPR: /* - [-, c] */
3870 low = 0, high = arg1;
3872 case GE_EXPR: /* + [c, -] */
3873 in_p = ! in_p, low = arg1, high = 0;
3875 case LT_EXPR: /* - [c, -] */
3876 low = arg1, high = 0;
3878 case LE_EXPR: /* + [-, c] */
3879 in_p = ! in_p, low = 0, high = arg1;
3885 /* If this is an unsigned comparison, we also know that EXP is
3886 greater than or equal to zero. We base the range tests we make
3887 on that fact, so we record it here so we can parse existing
3888 range tests. We test arg0_type since often the return type
3889 of, e.g. EQ_EXPR, is boolean. */
3890 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3892 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3894 build_int_cst (arg0_type, 0),
3898 in_p = n_in_p, low = n_low, high = n_high;
3900 /* If the high bound is missing, but we have a nonzero low
3901 bound, reverse the range so it goes from zero to the low bound minus 1. */
3903 if (high == 0 && low && ! integer_zerop (low))
3906 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3907 integer_one_node, 0);
3908 low = build_int_cst (arg0_type, 0);
3916 /* (-x) IN [a,b] -> x in [-b, -a] */
3917 n_low = range_binop (MINUS_EXPR, exp_type,
3918 build_int_cst (exp_type, 0),
3920 n_high = range_binop (MINUS_EXPR, exp_type,
3921 build_int_cst (exp_type, 0),
3923 if (n_high != 0 && TREE_OVERFLOW (n_high))
3929 exp = build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3930 build_int_cst (exp_type, 1));
3933 case PLUS_EXPR: case MINUS_EXPR:
3934 if (TREE_CODE (arg1) != INTEGER_CST)
3937 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3938 move a constant to the other side. */
3939 if (!TYPE_UNSIGNED (arg0_type)
3940 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3943 /* If EXP is signed, any overflow in the computation is undefined,
3944 so we don't worry about it so long as our computations on
3945 the bounds don't overflow. For unsigned, overflow is defined
3946 and this is exactly the right thing. */
3947 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3948 arg0_type, low, 0, arg1, 0);
3949 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3950 arg0_type, high, 1, arg1, 0);
3951 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3952 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3955 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3956 *strict_overflow_p = true;
3959 /* Check for an unsigned range which has wrapped around the maximum
3960 value thus making n_high < n_low, and normalize it. */
3961 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3963 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3964 integer_one_node, 0);
3965 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3966 integer_one_node, 0);
3968 /* If the range is of the form +/- [ x+1, x ], we won't
3969 be able to normalize it. But then, it represents the
3970 whole range or the empty set, so make it +/- [ -, - ]. */
3972 if (tree_int_cst_equal (n_low, low)
3973 && tree_int_cst_equal (n_high, high))
3979 low = n_low, high = n_high;
3984 CASE_CONVERT: case NON_LVALUE_EXPR:
3985 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3988 if (! INTEGRAL_TYPE_P (arg0_type)
3989 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3990 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3993 n_low = low, n_high = high;
3996 n_low = fold_convert_loc (loc, arg0_type, n_low);
3999 n_high = fold_convert_loc (loc, arg0_type, n_high);
4002 /* If we're converting arg0 from an unsigned type, to exp,
4003 a signed type, we will be doing the comparison as unsigned.
4004 The tests above have already verified that LOW and HIGH are both positive.
4007 So we have to ensure that we will handle large unsigned
4008 values the same way that the current signed bounds treat negative values. */
4011 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4015 /* For fixed-point modes, we need to pass the saturating flag
4016 as the 2nd parameter. */
4017 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4018 equiv_type = lang_hooks.types.type_for_mode
4019 (TYPE_MODE (arg0_type),
4020 TYPE_SATURATING (arg0_type));
4022 equiv_type = lang_hooks.types.type_for_mode
4023 (TYPE_MODE (arg0_type), 1);
4025 /* A range without an upper bound is, naturally, unbounded.
4026 Since convert would have cropped a very large value, use
4027 the max value for the destination type. */
4029 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4030 : TYPE_MAX_VALUE (arg0_type);
4032 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4033 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4034 fold_convert_loc (loc, arg0_type,
4036 build_int_cst (arg0_type, 1));
4038 /* If the low bound is specified, "and" the range with the
4039 range for which the original unsigned value will be
4043 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4044 1, n_low, n_high, 1,
4045 fold_convert_loc (loc, arg0_type,
4050 in_p = (n_in_p == in_p);
4054 /* Otherwise, "or" the range with the range of the input
4055 that will be interpreted as negative. */
4056 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4057 0, n_low, n_high, 1,
4058 fold_convert_loc (loc, arg0_type,
4063 in_p = (in_p != n_in_p);
4068 low = n_low, high = n_high;
4078 /* If EXP is a constant, we can evaluate whether this is true or false. */
4079 if (TREE_CODE (exp) == INTEGER_CST)
4081 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4083 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4089 *pin_p = in_p, *plow = low, *phigh = high;
4093 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4094 type, TYPE, return an expression to test if EXP is in (or out of, depending
4095 on IN_P) the range. Return 0 if the test couldn't be created. */
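/* For illustration, the canonical result built below has the shape

     low <= exp && exp <= high   ->   (unsigned) (exp - low) <= high - low

   so a check for the range [2, 5] becomes (unsigned) (exp - 2) <= 3, and
   the special case [1, signed maximum] becomes (signed) exp > 0.  */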
4098 build_range_check (location_t loc, tree type, tree exp, int in_p,
4099 tree low, tree high)
4101 tree etype = TREE_TYPE (exp), value;
4103 #ifdef HAVE_canonicalize_funcptr_for_compare
4104 /* Disable this optimization for function pointer expressions
4105 on targets that require function pointer canonicalization. */
4106 if (HAVE_canonicalize_funcptr_for_compare
4107 && TREE_CODE (etype) == POINTER_TYPE
4108 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4114 value = build_range_check (loc, type, exp, 1, low, high);
4116 return invert_truthvalue_loc (loc, value);
4121 if (low == 0 && high == 0)
4122 return build_int_cst (type, 1);
4125 return fold_build2_loc (loc, LE_EXPR, type, exp,
4126 fold_convert_loc (loc, etype, high));
4129 return fold_build2_loc (loc, GE_EXPR, type, exp,
4130 fold_convert_loc (loc, etype, low));
4132 if (operand_equal_p (low, high, 0))
4133 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4134 fold_convert_loc (loc, etype, low));
4136 if (integer_zerop (low))
4138 if (! TYPE_UNSIGNED (etype))
4140 etype = unsigned_type_for (etype);
4141 high = fold_convert_loc (loc, etype, high);
4142 exp = fold_convert_loc (loc, etype, exp);
4144 return build_range_check (loc, type, exp, 1, 0, high);
4147 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4148 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4150 unsigned HOST_WIDE_INT lo;
4154 prec = TYPE_PRECISION (etype);
4155 if (prec <= HOST_BITS_PER_WIDE_INT)
4158 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4162 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4163 lo = (unsigned HOST_WIDE_INT) -1;
4166 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4168 if (TYPE_UNSIGNED (etype))
4170 tree signed_etype = signed_type_for (etype);
4171 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4173 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4175 etype = signed_etype;
4176 exp = fold_convert_loc (loc, etype, exp);
4178 return fold_build2_loc (loc, GT_EXPR, type, exp,
4179 build_int_cst (etype, 0));
4183 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4184 This requires wrap-around arithmetic for the type of the expression.
4185 First make sure that arithmetic in this type is valid, then make sure
4186 that it wraps around. */
4187 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4188 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4189 TYPE_UNSIGNED (etype));
4191 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4193 tree utype, minv, maxv;
4195 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4196 for the type in question, as we rely on this here. */
4197 utype = unsigned_type_for (etype);
4198 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4199 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4200 integer_one_node, 1);
4201 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4203 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4210 high = fold_convert_loc (loc, etype, high);
4211 low = fold_convert_loc (loc, etype, low);
4212 exp = fold_convert_loc (loc, etype, exp);
4214 value = const_binop (MINUS_EXPR, high, low);
4217 if (POINTER_TYPE_P (etype))
4219 if (value != 0 && !TREE_OVERFLOW (value))
4221 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4222 return build_range_check (loc, type,
4223 fold_build_pointer_plus_loc (loc, exp, low),
4224 1, build_int_cst (etype, 0), value);
4229 if (value != 0 && !TREE_OVERFLOW (value))
4230 return build_range_check (loc, type,
4231 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4232 1, build_int_cst (etype, 0), value);
4237 /* Return the predecessor of VAL in its type, handling the infinite case. */
4240 range_predecessor (tree val)
4242 tree type = TREE_TYPE (val);
4244 if (INTEGRAL_TYPE_P (type)
4245 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4248 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4251 /* Return the successor of VAL in its type, handling the infinite case. */
4254 range_successor (tree val)
4256 tree type = TREE_TYPE (val);
4258 if (INTEGRAL_TYPE_P (type)
4259 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4262 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4265 /* Given two ranges, see if we can merge them into one. Return 1 if we
4266 can, 0 if we can't. Set the output range into the specified parameters. */
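/* For illustration: merging "+ [2, 5]" with "+ [4, 9]" (both IN_P set)
   yields "+ [4, 5]", the intersection of the two included ranges; callers
   that need a union, e.g. for ||, typically invert both ranges first and
   invert the merged result again (De Morgan).  */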
4269 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4270 tree high0, int in1_p, tree low1, tree high1)
4278 int lowequal = ((low0 == 0 && low1 == 0)
4279 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4280 low0, 0, low1, 0)));
4281 int highequal = ((high0 == 0 && high1 == 0)
4282 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4283 high0, 1, high1, 1)));
4285 /* Make range 0 be the range that starts first, or ends last if they
4286 start at the same value. Swap them if it isn't. */
4287 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4290 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4291 high1, 1, high0, 1))))
4293 temp = in0_p, in0_p = in1_p, in1_p = temp;
4294 tem = low0, low0 = low1, low1 = tem;
4295 tem = high0, high0 = high1, high1 = tem;
4298 /* Now flag two cases, whether the ranges are disjoint or whether the
4299 second range is totally subsumed in the first. Note that the tests
4300 below are simplified by the ones above. */
4301 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4302 high0, 1, low1, 0));
4303 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4304 high1, 1, high0, 1));
4306 /* We now have four cases, depending on whether we are including or
4307 excluding the two ranges. */
4310 /* If they don't overlap, the result is false. If the second range
4311 is a subset it is the result. Otherwise, the range is from the start
4312 of the second to the end of the first. */
4314 in_p = 0, low = high = 0;
4316 in_p = 1, low = low1, high = high1;
4318 in_p = 1, low = low1, high = high0;
4321 else if (in0_p && ! in1_p)
4323 /* If they don't overlap, the result is the first range. If they are
4324 equal, the result is false. If the second range is a subset of the
4325 first, and the ranges begin at the same place, we go from just after
4326 the end of the second range to the end of the first. If the second
4327 range is not a subset of the first, or if it is a subset and both
4328 ranges end at the same place, the range starts at the start of the
4329 first range and ends just before the second range.
4330 Otherwise, we can't describe this as a single range. */
4332 in_p = 1, low = low0, high = high0;
4333 else if (lowequal && highequal)
4334 in_p = 0, low = high = 0;
4335 else if (subset && lowequal)
4337 low = range_successor (high1);
4342 /* We are in the weird situation where high0 > high1 but
4343 high1 has no successor. Punt. */
4347 else if (! subset || highequal)
4350 high = range_predecessor (low1);
4354 /* low0 < low1 but low1 has no predecessor. Punt. */
4362 else if (! in0_p && in1_p)
4364 /* If they don't overlap, the result is the second range. If the second
4365 is a subset of the first, the result is false. Otherwise,
4366 the range starts just after the first range and ends at the
4367 end of the second. */
4369 in_p = 1, low = low1, high = high1;
4370 else if (subset || highequal)
4371 in_p = 0, low = high = 0;
4374 low = range_successor (high0);
4379 /* high1 > high0 but high0 has no successor. Punt. */
4387 /* The case where we are excluding both ranges. Here the complex case
4388 is if they don't overlap. In that case, the only time we have a
4389 range is if they are adjacent. If the second is a subset of the
4390 first, the result is the first. Otherwise, the range to exclude
4391 starts at the beginning of the first range and ends at the end of the
4395 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4396 range_successor (high0),
4398 in_p = 0, low = low0, high = high1;
4401 /* Canonicalize - [min, x] into - [-, x]. */
4402 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4403 switch (TREE_CODE (TREE_TYPE (low0)))
4406 if (TYPE_PRECISION (TREE_TYPE (low0))
4407 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4411 if (tree_int_cst_equal (low0,
4412 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4416 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4417 && integer_zerop (low0))
4424 /* Canonicalize - [x, max] into - [x, -]. */
4425 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4426 switch (TREE_CODE (TREE_TYPE (high1)))
4429 if (TYPE_PRECISION (TREE_TYPE (high1))
4430 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4434 if (tree_int_cst_equal (high1,
4435 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4439 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4440 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4442 integer_one_node, 1)))
4449 /* The ranges might also be adjacent between the maximum and
4450 minimum values of the given type. For
4451 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4452 return + [x + 1, y - 1]. */
4453 if (low0 == 0 && high1 == 0)
4455 low = range_successor (high0);
4456 high = range_predecessor (low1);
4457 if (low == 0 || high == 0)
4467 in_p = 0, low = low0, high = high0;
4469 in_p = 0, low = low0, high = high1;
4472 *pin_p = in_p, *plow = low, *phigh = high;
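/* Illustrative sketch, not part of GCC: the "both ranges included" case of
   merge_ranges written out for plain ints, assuming range 0 starts no later
   than range 1 (the code above swaps the ranges to guarantee this) and both
   ranges are bounded.  All names are hypothetical.  */
#if 0
static void
merge_in_in_demo (int low0, int high0, int low1, int high1,
		  int *in_p, int *low, int *high)
{
  if (high0 < low1)
    /* Disjoint: the conjunction of the two tests is always false.  */
    *in_p = 0, *low = *high = 0;
  else if (high1 <= high0)
    /* Range 1 is a subset of range 0: range 1 is the result.  */
    *in_p = 1, *low = low1, *high = high1;
  else
    /* Partial overlap: from the start of range 1 to the end of range 0.  */
    *in_p = 1, *low = low1, *high = high0;
}
#endif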
4477 /* Subroutine of fold, looking inside expressions of the form
4478 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4479 of the COND_EXPR. This function is also used to optimize
4480 A op B ? C : A, by reversing the comparison first.
4482 Return a folded expression whose code is not a COND_EXPR
4483 anymore, or NULL_TREE if no folding opportunity is found. */
4486 fold_cond_expr_with_comparison (location_t loc, tree type,
4487 tree arg0, tree arg1, tree arg2)
4489 enum tree_code comp_code = TREE_CODE (arg0);
4490 tree arg00 = TREE_OPERAND (arg0, 0);
4491 tree arg01 = TREE_OPERAND (arg0, 1);
4492 tree arg1_type = TREE_TYPE (arg1);
4498 /* If we have A op 0 ? A : -A, consider applying the following
4501 A == 0? A : -A same as -A
4502 A != 0? A : -A same as A
4503 A >= 0? A : -A same as abs (A)
4504 A > 0? A : -A same as abs (A)
4505 A <= 0? A : -A same as -abs (A)
4506 A < 0? A : -A same as -abs (A)
4508 None of these transformations work for modes with signed
4509 zeros. If A is +/-0, the first two transformations will
4510 change the sign of the result (from +0 to -0, or vice
4511 versa). The last four will fix the sign of the result,
4512 even though the original expressions could be positive or
4513 negative, depending on the sign of A.
4515 Note that all these transformations are correct if A is
4516 NaN, since the two alternatives (A and -A) are also NaNs. */
4517 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4518 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4519 ? real_zerop (arg01)
4520 : integer_zerop (arg01))
4521 && ((TREE_CODE (arg2) == NEGATE_EXPR
4522 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4523 /* In the case that A is of the form X-Y, '-A' (arg2) may
4524 have already been folded to Y-X, check for that. */
4525 || (TREE_CODE (arg1) == MINUS_EXPR
4526 && TREE_CODE (arg2) == MINUS_EXPR
4527 && operand_equal_p (TREE_OPERAND (arg1, 0),
4528 TREE_OPERAND (arg2, 1), 0)
4529 && operand_equal_p (TREE_OPERAND (arg1, 1),
4530 TREE_OPERAND (arg2, 0), 0))))
4535 tem = fold_convert_loc (loc, arg1_type, arg1);
4536 return pedantic_non_lvalue_loc (loc,
4537 fold_convert_loc (loc, type,
4538 negate_expr (tem)));
4541 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4544 if (flag_trapping_math)
4549 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4550 arg1 = fold_convert_loc (loc, signed_type_for
4551 (TREE_TYPE (arg1)), arg1);
4552 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4553 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4556 if (flag_trapping_math)
4560 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4561 arg1 = fold_convert_loc (loc, signed_type_for
4562 (TREE_TYPE (arg1)), arg1);
4563 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4564 return negate_expr (fold_convert_loc (loc, type, tem));
4566 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4570 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4571 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4572 both transformations are correct when A is NaN: A != 0
4573 is then true, and A == 0 is false. */
4575 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4576 && integer_zerop (arg01) && integer_zerop (arg2))
4578 if (comp_code == NE_EXPR)
4579 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4580 else if (comp_code == EQ_EXPR)
4581 return build_int_cst (type, 0);
4584 /* Try some transformations of A op B ? A : B.
4586 A == B? A : B same as B
4587 A != B? A : B same as A
4588 A >= B? A : B same as max (A, B)
4589 A > B? A : B same as max (B, A)
4590 A <= B? A : B same as min (A, B)
4591 A < B? A : B same as min (B, A)
4593 As above, these transformations don't work in the presence
4594 of signed zeros. For example, if A and B are zeros of
4595 opposite sign, the first two transformations will change
4596 the sign of the result. In the last four, the original
4597 expressions give different results for (A=+0, B=-0) and
4598 (A=-0, B=+0), but the transformed expressions do not.
4600 The first two transformations are correct if either A or B
4601 is a NaN. In the first transformation, the condition will
4602 be false, and B will indeed be chosen. In the case of the
4603 second transformation, the condition A != B will be true,
4604 and A will be chosen.
4606 The conversions to max() and min() are not correct if B is
4607 a number and A is not. The conditions in the original
4608 expressions will be false, so all four give B. The min()
4609 and max() versions would give a NaN instead. */
4610 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4611 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4612 /* Avoid these transformations if the COND_EXPR may be used
4613 as an lvalue in the C++ front-end. PR c++/19199. */
4615 || (strcmp (lang_hooks.name, "GNU C++") != 0
4616 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4617 || ! maybe_lvalue_p (arg1)
4618 || ! maybe_lvalue_p (arg2)))
4620 tree comp_op0 = arg00;
4621 tree comp_op1 = arg01;
4622 tree comp_type = TREE_TYPE (comp_op0);
4624 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4625 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4635 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4637 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4642 /* In C++ a ?: expression can be an lvalue, so put the
4643 operand which will be used if they are equal first
4644 so that we can convert this back to the
4645 corresponding COND_EXPR. */
4646 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4648 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4649 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4650 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4651 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4652 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4653 comp_op1, comp_op0);
4654 return pedantic_non_lvalue_loc (loc,
4655 fold_convert_loc (loc, type, tem));
4662 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4664 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4665 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4666 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4667 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4668 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4669 comp_op1, comp_op0);
4670 return pedantic_non_lvalue_loc (loc,
4671 fold_convert_loc (loc, type, tem));
4675 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4676 return pedantic_non_lvalue_loc (loc,
4677 fold_convert_loc (loc, type, arg2));
4680 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4681 return pedantic_non_lvalue_loc (loc,
4682 fold_convert_loc (loc, type, arg1));
4685 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4690 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4691 we might still be able to simplify this. For example,
4692 if C1 is one less or one more than C2, this might have started
4693 out as a MIN or MAX and been transformed by this function.
4694 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4696 if (INTEGRAL_TYPE_P (type)
4697 && TREE_CODE (arg01) == INTEGER_CST
4698 && TREE_CODE (arg2) == INTEGER_CST)
4702 if (TREE_CODE (arg1) == INTEGER_CST)
4704 /* We can replace A with C1 in this case. */
4705 arg1 = fold_convert_loc (loc, type, arg01);
4706 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4709 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4710 MIN_EXPR, to preserve the signedness of the comparison. */
4711 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4713 && operand_equal_p (arg01,
4714 const_binop (PLUS_EXPR, arg2,
4715 build_int_cst (type, 1)),
4718 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4719 fold_convert_loc (loc, TREE_TYPE (arg00),
4721 return pedantic_non_lvalue_loc (loc,
4722 fold_convert_loc (loc, type, tem));
4727 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4729 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4731 && operand_equal_p (arg01,
4732 const_binop (MINUS_EXPR, arg2,
4733 build_int_cst (type, 1)),
4736 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4737 fold_convert_loc (loc, TREE_TYPE (arg00),
4739 return pedantic_non_lvalue_loc (loc,
4740 fold_convert_loc (loc, type, tem));
4745 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4746 MAX_EXPR, to preserve the signedness of the comparison. */
4747 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4749 && operand_equal_p (arg01,
4750 const_binop (MINUS_EXPR, arg2,
4751 build_int_cst (type, 1)),
4754 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4755 fold_convert_loc (loc, TREE_TYPE (arg00),
4757 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4762 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4763 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4765 && operand_equal_p (arg01,
4766 const_binop (PLUS_EXPR, arg2,
4767 build_int_cst (type, 1)),
4770 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4771 fold_convert_loc (loc, TREE_TYPE (arg00),
4773 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
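/* Illustrative sketch, not part of GCC: one of the constant cases handled
   above.  With C1 == C2 + 1, "A < C1 ? A : C2" selects A whenever A <= C2
   and C2 otherwise, i.e. MIN (A, C2).  The function name and the particular
   constants are hypothetical.  */
#if 0
static int
cond_to_min_demo (int a)
{
  /* Here C2 is 41 and C1 is 42; this is equivalent to MIN (a, 41).  */
  return a < 42 ? a : 41;
}
#endif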
4787 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4788 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4789 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4793 /* EXP is some logical combination of boolean tests. See if we can
4794 merge it into some range test. Return the new tree if so. */
4797 fold_range_test (location_t loc, enum tree_code code, tree type,
4800 int or_op = (code == TRUTH_ORIF_EXPR
4801 || code == TRUTH_OR_EXPR);
4802 int in0_p, in1_p, in_p;
4803 tree low0, low1, low, high0, high1, high;
4804 bool strict_overflow_p = false;
4805 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4806 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4808 const char * const warnmsg = G_("assuming signed overflow does not occur "
4809 "when simplifying range test");
4811 /* If this is an OR operation, invert both sides; we will invert
4812 again at the end. */
4814 in0_p = ! in0_p, in1_p = ! in1_p;
4816 /* If both expressions are the same, if we can merge the ranges, and we
4817 can build the range test, return it or its inversion. If one of the
4818 ranges is always true or always false, consider it to be the same
4819 expression as the other. */
4820 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4821 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4823 && 0 != (tem = (build_range_check (loc, type,
4825 : rhs != 0 ? rhs : integer_zero_node,
4828 if (strict_overflow_p)
4829 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4830 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4833 /* On machines where branches are expensive, if this is a
4834 short-circuited branch and the underlying object on both sides
4835 is the same, make a non-short-circuit operation. */
4836 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4837 && lhs != 0 && rhs != 0
4838 && (code == TRUTH_ANDIF_EXPR
4839 || code == TRUTH_ORIF_EXPR)
4840 && operand_equal_p (lhs, rhs, 0))
4842 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4843 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4844 which case we can't do this. */
4845 if (simple_operand_p (lhs))
4846 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4847 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4850 else if (!lang_hooks.decls.global_bindings_p ()
4851 && !CONTAINS_PLACEHOLDER_P (lhs))
4853 tree common = save_expr (lhs);
4855 if (0 != (lhs = build_range_check (loc, type, common,
4856 or_op ? ! in0_p : in0_p,
4858 && (0 != (rhs = build_range_check (loc, type, common,
4859 or_op ? ! in1_p : in1_p,
4862 if (strict_overflow_p)
4863 fold_overflow_warning (warnmsg,
4864 WARN_STRICT_OVERFLOW_COMPARISON);
4865 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4866 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
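/* Illustrative sketch, not part of GCC: the effect of the
   LOGICAL_OP_NON_SHORT_CIRCUIT rewrite above on a source-level test.  When
   both comparisons are simple and free of side effects, the short-circuit
   form can be evaluated unconditionally.  The function name is
   hypothetical.  */
#if 0
static int
non_short_circuit_demo (int c)
{
  /* Source form:     c >= 'a' && c <= 'z'
     Rewritten form:  (c >= 'a') & (c <= 'z')
     Evaluating the second comparison unconditionally cannot change the
     result, but it avoids a branch.  */
  return (c >= 'a') & (c <= 'z');
}
#endif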
4875 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4876 bit value. Arrange things so the extra bits will be set to zero if and
4877 only if C is sign-extended to its full width. If MASK is nonzero,
4878 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4881 unextend (tree c, int p, int unsignedp, tree mask)
4883 tree type = TREE_TYPE (c);
4884 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4887 if (p == modesize || unsignedp)
4890 /* We work by getting just the sign bit into the low-order bit, then
4891 into the high-order bit, then sign-extend. We then XOR that value
4893 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4894 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4896 /* We must use a signed type in order to get an arithmetic right shift.
4897 However, we must also avoid introducing accidental overflows, so that
4898 a subsequent call to integer_zerop will work. Hence we must
4899 do the type conversion here. At this point, the constant is either
4900 zero or one, and the conversion to a signed type can never overflow.
4901 We could get an overflow if this conversion is done anywhere else. */
4902 if (TYPE_UNSIGNED (type))
4903 temp = fold_convert (signed_type_for (type), temp);
4905 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4906 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4908 temp = const_binop (BIT_AND_EXPR, temp,
4909 fold_convert (TREE_TYPE (c), mask));
4910 /* If necessary, convert the type back to match the type of C. */
4911 if (TYPE_UNSIGNED (type))
4912 temp = fold_convert (type, temp);
4914 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
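/* Illustrative sketch, not part of GCC: the shift-based sign extension that
   unextend relies on, shown for a P-bit value held in a 32-bit word.  The
   helper name and fixed width are hypothetical, and both the cast and the
   right shift of a negative value are implementation-defined in C, which is
   one reason the real code manipulates trees instead.  */
#if 0
static int
sign_extend_demo (unsigned int field, int p)
{
  int shift = 32 - p;

  /* Move the field's sign bit up into the word's sign bit, then shift back
     down with an arithmetic shift so that bit is replicated.  */
  return ((int) (field << shift)) >> shift;
}
#endif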
4917 /* For an expression that has the form
4921 we can drop one of the inner expressions and simplify to
4925 LOC is the location of the resulting expression. OP is the inner
4926 logical operation (the left-hand side in the examples above), while CMPOP
4927 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4928 removing a condition that guards another, as in
4929 (A != NULL && A->...) || A == NULL
4930 which we must not transform. If RHS_ONLY is true, only eliminate the
4931 right-most operand of the inner logical operation. */
4934 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4937 tree type = TREE_TYPE (cmpop);
4938 enum tree_code code = TREE_CODE (cmpop);
4939 enum tree_code truthop_code = TREE_CODE (op);
4940 tree lhs = TREE_OPERAND (op, 0);
4941 tree rhs = TREE_OPERAND (op, 1);
4942 tree orig_lhs = lhs, orig_rhs = rhs;
4943 enum tree_code rhs_code = TREE_CODE (rhs);
4944 enum tree_code lhs_code = TREE_CODE (lhs);
4945 enum tree_code inv_code;
4947 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4950 if (TREE_CODE_CLASS (code) != tcc_comparison)
4953 if (rhs_code == truthop_code)
4955 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4956 if (newrhs != NULL_TREE)
4959 rhs_code = TREE_CODE (rhs);
4962 if (lhs_code == truthop_code && !rhs_only)
4964 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4965 if (newlhs != NULL_TREE)
4968 lhs_code = TREE_CODE (lhs);
4972 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
4973 if (inv_code == rhs_code
4974 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
4975 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
4977 if (!rhs_only && inv_code == lhs_code
4978 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
4979 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
4981 if (rhs != orig_rhs || lhs != orig_lhs)
4982 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
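/* Illustrative sketch, not part of GCC: the simplification above applied to
   a concrete expression.  The names are hypothetical.  */
#if 0
static int
merge_opposite_arm_demo (int a, int b)
{
  /* "(b && a < 0) || a >= 0" simplifies to "b || a >= 0": when a >= 0 the
     result is true either way, and when a < 0 the dropped test was
     redundant.  Note that dropping the *left* operand of the inner
     operation can remove a guard, e.g. in "(p != 0 && *p) || p == 0",
     which is what the RHS_ONLY restriction is there to prevent.  */
  return b || a >= 0;
}
#endif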
4987 /* Find ways of folding logical expressions of LHS and RHS:
4988 Try to merge two comparisons to the same innermost item.
4989 Look for range tests like "ch >= '0' && ch <= '9'".
4990 Look for combinations of simple terms on machines with expensive branches
4991 and evaluate the RHS unconditionally.
4993 For example, if we have p->a == 2 && p->b == 4 and we can make an
4994 object large enough to span both A and B, we can do this with a comparison
4995 against the object ANDed with a mask.
4997 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4998 operations to do this with one comparison.
5000 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5001 function and the one above.
5003 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5004 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5006 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5009 We return the simplified tree or 0 if no optimization is possible. */
5012 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5015 /* If this is the "or" of two comparisons, we can do something if
5016 the comparisons are NE_EXPR. If this is the "and", we can do something
5017 if the comparisons are EQ_EXPR. I.e.,
5018 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5020 WANTED_CODE is this operation code. For single bit fields, we can
5021 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5022 comparison for one-bit fields. */
5024 enum tree_code wanted_code;
5025 enum tree_code lcode, rcode;
5026 tree ll_arg, lr_arg, rl_arg, rr_arg;
5027 tree ll_inner, lr_inner, rl_inner, rr_inner;
5028 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5029 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5030 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5031 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5032 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5033 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5034 enum machine_mode lnmode, rnmode;
5035 tree ll_mask, lr_mask, rl_mask, rr_mask;
5036 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5037 tree l_const, r_const;
5038 tree lntype, rntype, result;
5039 HOST_WIDE_INT first_bit, end_bit;
5041 tree orig_lhs = lhs, orig_rhs = rhs;
5042 enum tree_code orig_code = code;
5044 /* Start by getting the comparison codes. Fail if anything is volatile.
5045 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5046 it were surrounded with a NE_EXPR. */
5048 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5051 lcode = TREE_CODE (lhs);
5052 rcode = TREE_CODE (rhs);
5054 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5056 lhs = build2 (NE_EXPR, truth_type, lhs,
5057 build_int_cst (TREE_TYPE (lhs), 0));
5061 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5063 rhs = build2 (NE_EXPR, truth_type, rhs,
5064 build_int_cst (TREE_TYPE (rhs), 0));
5068 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5069 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5072 ll_arg = TREE_OPERAND (lhs, 0);
5073 lr_arg = TREE_OPERAND (lhs, 1);
5074 rl_arg = TREE_OPERAND (rhs, 0);
5075 rr_arg = TREE_OPERAND (rhs, 1);
5077 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5078 if (simple_operand_p (ll_arg)
5079 && simple_operand_p (lr_arg))
5081 if (operand_equal_p (ll_arg, rl_arg, 0)
5082 && operand_equal_p (lr_arg, rr_arg, 0))
5084 result = combine_comparisons (loc, code, lcode, rcode,
5085 truth_type, ll_arg, lr_arg);
5089 else if (operand_equal_p (ll_arg, rr_arg, 0)
5090 && operand_equal_p (lr_arg, rl_arg, 0))
5092 result = combine_comparisons (loc, code, lcode,
5093 swap_tree_comparison (rcode),
5094 truth_type, ll_arg, lr_arg);
5100 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5101 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5103 /* If the RHS can be evaluated unconditionally and its operands are
5104 simple, it wins to evaluate the RHS unconditionally on machines
5105 with expensive branches. In this case, this isn't a comparison
5106 that can be merged. Avoid doing this if the RHS is a floating-point
5107 comparison since those can trap. */
5109 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5111 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5112 && simple_operand_p (rl_arg)
5113 && simple_operand_p (rr_arg))
5115 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5116 if (code == TRUTH_OR_EXPR
5117 && lcode == NE_EXPR && integer_zerop (lr_arg)
5118 && rcode == NE_EXPR && integer_zerop (rr_arg)
5119 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5120 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5121 return build2_loc (loc, NE_EXPR, truth_type,
5122 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5124 build_int_cst (TREE_TYPE (ll_arg), 0));
5126 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5127 if (code == TRUTH_AND_EXPR
5128 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5129 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5130 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5131 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5132 return build2_loc (loc, EQ_EXPR, truth_type,
5133 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5135 build_int_cst (TREE_TYPE (ll_arg), 0));
5137 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5139 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5140 return build2_loc (loc, code, truth_type, lhs, rhs);
5145 /* See if the comparisons can be merged. Then get all the parameters for
5148 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5149 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5153 ll_inner = decode_field_reference (loc, ll_arg,
5154 &ll_bitsize, &ll_bitpos, &ll_mode,
5155 &ll_unsignedp, &volatilep, &ll_mask,
5157 lr_inner = decode_field_reference (loc, lr_arg,
5158 &lr_bitsize, &lr_bitpos, &lr_mode,
5159 &lr_unsignedp, &volatilep, &lr_mask,
5161 rl_inner = decode_field_reference (loc, rl_arg,
5162 &rl_bitsize, &rl_bitpos, &rl_mode,
5163 &rl_unsignedp, &volatilep, &rl_mask,
5165 rr_inner = decode_field_reference (loc, rr_arg,
5166 &rr_bitsize, &rr_bitpos, &rr_mode,
5167 &rr_unsignedp, &volatilep, &rr_mask,
5170 /* The inner operation on the lhs of each
5171 comparison must be the same if we are to be able to do anything.
5172 Then see if we have constants. If not, the same must be true for
5174 if (volatilep || ll_inner == 0 || rl_inner == 0
5175 || ! operand_equal_p (ll_inner, rl_inner, 0))
5178 if (TREE_CODE (lr_arg) == INTEGER_CST
5179 && TREE_CODE (rr_arg) == INTEGER_CST)
5180 l_const = lr_arg, r_const = rr_arg;
5181 else if (lr_inner == 0 || rr_inner == 0
5182 || ! operand_equal_p (lr_inner, rr_inner, 0))
5185 l_const = r_const = 0;
5187 /* If either comparison code is not correct for our logical operation,
5188 fail. However, we can convert a one-bit comparison against zero into
5189 the opposite comparison against that bit being set in the field. */
5191 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5192 if (lcode != wanted_code)
5194 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5196 /* Make the left operand unsigned, since we are only interested
5197 in the value of one bit. Otherwise we are doing the wrong
5206 /* This is analogous to the code for l_const above. */
5207 if (rcode != wanted_code)
5209 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5218 /* See if we can find a mode that contains both fields being compared on
5219 the left. If we can't, fail. Otherwise, update all constants and masks
5220 to be relative to a field of that size. */
5221 first_bit = MIN (ll_bitpos, rl_bitpos);
5222 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5223 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5224 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5226 if (lnmode == VOIDmode)
5229 lnbitsize = GET_MODE_BITSIZE (lnmode);
5230 lnbitpos = first_bit & ~ (lnbitsize - 1);
5231 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5232 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5234 if (BYTES_BIG_ENDIAN)
5236 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5237 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5240 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5241 size_int (xll_bitpos));
5242 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5243 size_int (xrl_bitpos));
5247 l_const = fold_convert_loc (loc, lntype, l_const);
5248 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5249 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5250 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5251 fold_build1_loc (loc, BIT_NOT_EXPR,
5254 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5256 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5261 r_const = fold_convert_loc (loc, lntype, r_const);
5262 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5263 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5264 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5265 fold_build1_loc (loc, BIT_NOT_EXPR,
5268 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5270 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5274 /* If the right sides are not constant, do the same for them. Also,
5275 disallow this optimization if a size or signedness mismatch occurs
5276 between the left and right sides. */
5279 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5280 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5281 /* Make sure the two fields on the right
5282 correspond to the left without being swapped. */
5283 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5286 first_bit = MIN (lr_bitpos, rr_bitpos);
5287 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5288 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5289 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5291 if (rnmode == VOIDmode)
5294 rnbitsize = GET_MODE_BITSIZE (rnmode);
5295 rnbitpos = first_bit & ~ (rnbitsize - 1);
5296 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5297 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5299 if (BYTES_BIG_ENDIAN)
5301 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5302 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5305 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5307 size_int (xlr_bitpos));
5308 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5310 size_int (xrr_bitpos));
5312 /* Make a mask that corresponds to both fields being compared.
5313 Do this for both items being compared. If the operands are the
5314 same size and the bits being compared are in the same position
5315 then we can do this by masking both and comparing the masked
5317 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5318 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5319 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5321 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5322 ll_unsignedp || rl_unsignedp);
5323 if (! all_ones_mask_p (ll_mask, lnbitsize))
5324 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5326 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5327 lr_unsignedp || rr_unsignedp);
5328 if (! all_ones_mask_p (lr_mask, rnbitsize))
5329 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5331 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5334 /* There is still another way we can do something: If both pairs of
5335 fields being compared are adjacent, we may be able to make a wider
5336 field containing them both.
5338 Note that we still must mask the lhs/rhs expressions. Furthermore,
5339 the mask must be shifted to account for the shift done by
5340 make_bit_field_ref. */
5341 if ((ll_bitsize + ll_bitpos == rl_bitpos
5342 && lr_bitsize + lr_bitpos == rr_bitpos)
5343 || (ll_bitpos == rl_bitpos + rl_bitsize
5344 && lr_bitpos == rr_bitpos + rr_bitsize))
5348 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5349 ll_bitsize + rl_bitsize,
5350 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5351 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5352 lr_bitsize + rr_bitsize,
5353 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5355 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5356 size_int (MIN (xll_bitpos, xrl_bitpos)));
5357 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5358 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5360 /* Convert to the smaller type before masking out unwanted bits. */
5362 if (lntype != rntype)
5364 if (lnbitsize > rnbitsize)
5366 lhs = fold_convert_loc (loc, rntype, lhs);
5367 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5370 else if (lnbitsize < rnbitsize)
5372 rhs = fold_convert_loc (loc, lntype, rhs);
5373 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5378 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5379 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5381 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5382 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5384 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5390 /* Handle the case of comparisons with constants. If there is something in
5391 common between the masks, those bits of the constants must be the same.
5392 If not, the condition is always false. Test for this to avoid generating
5393 incorrect code below. */
5394 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5395 if (! integer_zerop (result)
5396 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5397 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5399 if (wanted_code == NE_EXPR)
5401 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5402 return constant_boolean_node (true, truth_type);
5406 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5407 return constant_boolean_node (false, truth_type);
5411 /* Construct the expression we will return. First get the component
5412 reference we will make. Unless the mask is all ones the width of
5413 that field, perform the mask operation. Then compare with the
5415 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5416 ll_unsignedp || rl_unsignedp);
5418 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5419 if (! all_ones_mask_p (ll_mask, lnbitsize))
5420 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5422 return build2_loc (loc, wanted_code, truth_type, result,
5423 const_binop (BIT_IOR_EXPR, l_const, r_const));
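/* Illustrative sketch, not part of GCC: two of the rewrites performed above,
   written for plain ints.  The names are hypothetical.  */
#if 0
static int
or_of_ne_demo (int a, int b)
{
  /* (a != 0) || (b != 0)  becomes  (a | b) != 0.  */
  return (a | b) != 0;
}

static int
and_of_eq_demo (int a, int b)
{
  /* (a == 0) && (b == 0)  becomes  (a | b) == 0.  */
  return (a | b) == 0;
}
#endif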
5426 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5430 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5434 enum tree_code op_code;
5437 int consts_equal, consts_lt;
5440 STRIP_SIGN_NOPS (arg0);
5442 op_code = TREE_CODE (arg0);
5443 minmax_const = TREE_OPERAND (arg0, 1);
5444 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5445 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5446 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5447 inner = TREE_OPERAND (arg0, 0);
5449 /* If something does not permit us to optimize, return the original tree. */
5450 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5451 || TREE_CODE (comp_const) != INTEGER_CST
5452 || TREE_OVERFLOW (comp_const)
5453 || TREE_CODE (minmax_const) != INTEGER_CST
5454 || TREE_OVERFLOW (minmax_const))
5457 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5458 and GT_EXPR, doing the rest with recursive calls using logical
5462 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5465 = optimize_minmax_comparison (loc,
5466 invert_tree_comparison (code, false),
5469 return invert_truthvalue_loc (loc, tem);
5475 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5476 optimize_minmax_comparison
5477 (loc, EQ_EXPR, type, arg0, comp_const),
5478 optimize_minmax_comparison
5479 (loc, GT_EXPR, type, arg0, comp_const));
5482 if (op_code == MAX_EXPR && consts_equal)
5483 /* MAX (X, 0) == 0 -> X <= 0 */
5484 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5486 else if (op_code == MAX_EXPR && consts_lt)
5487 /* MAX (X, 0) == 5 -> X == 5 */
5488 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5490 else if (op_code == MAX_EXPR)
5491 /* MAX (X, 0) == -1 -> false */
5492 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5494 else if (consts_equal)
5495 /* MIN (X, 0) == 0 -> X >= 0 */
5496 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5499 /* MIN (X, 0) == 5 -> false */
5500 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5503 /* MIN (X, 0) == -1 -> X == -1 */
5504 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5507 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5508 /* MAX (X, 0) > 0 -> X > 0
5509 MAX (X, 0) > 5 -> X > 5 */
5510 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5512 else if (op_code == MAX_EXPR)
5513 /* MAX (X, 0) > -1 -> true */
5514 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5516 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5517 /* MIN (X, 0) > 0 -> false
5518 MIN (X, 0) > 5 -> false */
5519 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5522 /* MIN (X, 0) > -1 -> X > -1 */
5523 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
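/* Illustrative sketch, not part of GCC: one of the MIN/MAX identities
   handled above.  The name is hypothetical.  */
#if 0
static int
minmax_compare_demo (int x)
{
  /* "MAX (x, 0) == 0", i.e. "(x > 0 ? x : 0) == 0", is the same as
     "x <= 0".  */
  return x <= 0;
}
#endif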
5530 /* T is an integer expression that is being multiplied or divided by a
5531 constant C, or reduced modulo C (CODE says which operation and what kind
5532 of divide or modulus). See if we can eliminate that operation by folding it with
5533 other operations already in T. WIDE_TYPE, if non-null, is a type that
5534 should be used for the computation if wider than our type.
5536 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5537 (X * 2) + (Y * 4). We must, however, be assured that either the original
5538 expression would not overflow or that overflow is undefined for the type
5539 in the language in question.
5541 If we return a non-null expression, it is an equivalent form of the
5542 original computation, but need not be in the original type.
5544 We set *STRICT_OVERFLOW_P to true if the return value depends on
5545 signed overflow being undefined. Otherwise we do not change
5546 *STRICT_OVERFLOW_P. */
5549 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5550 bool *strict_overflow_p)
5552 /* To avoid exponential search depth, refuse to allow recursion past
5553 three levels. Beyond that (1) it's highly unlikely that we'll find
5554 something interesting and (2) we've probably processed it before
5555 when we built the inner expression. */
5564 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5571 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5572 bool *strict_overflow_p)
5574 tree type = TREE_TYPE (t);
5575 enum tree_code tcode = TREE_CODE (t);
5576 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5577 > GET_MODE_SIZE (TYPE_MODE (type)))
5578 ? wide_type : type);
5580 int same_p = tcode == code;
5581 tree op0 = NULL_TREE, op1 = NULL_TREE;
5582 bool sub_strict_overflow_p;
5584 /* Don't deal with constants of zero here; they confuse the code below. */
5585 if (integer_zerop (c))
5588 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5589 op0 = TREE_OPERAND (t, 0);
5591 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5592 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5594 /* Note that we need not handle conditional operations here since fold
5595 already handles those cases. So just do arithmetic here. */
5599 /* For a constant, we can always simplify if we are a multiply
5600 or (for divide and modulus) if it is a multiple of our constant. */
5601 if (code == MULT_EXPR
5602 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5603 return const_binop (code, fold_convert (ctype, t),
5604 fold_convert (ctype, c));
5607 CASE_CONVERT: case NON_LVALUE_EXPR:
5608 /* If op0 is an expression ... */
5609 if ((COMPARISON_CLASS_P (op0)
5610 || UNARY_CLASS_P (op0)
5611 || BINARY_CLASS_P (op0)
5612 || VL_EXP_CLASS_P (op0)
5613 || EXPRESSION_CLASS_P (op0))
5614 /* ... and has wrapping overflow, and its type is smaller
5615 than ctype, then we cannot pass through as widening. */
5616 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5617 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5618 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5619 && (TYPE_PRECISION (ctype)
5620 > TYPE_PRECISION (TREE_TYPE (op0))))
5621 /* ... or this is a truncation (t is narrower than op0),
5622 then we cannot pass through this narrowing. */
5623 || (TYPE_PRECISION (type)
5624 < TYPE_PRECISION (TREE_TYPE (op0)))
5625 /* ... or signedness changes for division or modulus,
5626 then we cannot pass through this conversion. */
5627 || (code != MULT_EXPR
5628 && (TYPE_UNSIGNED (ctype)
5629 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5630 /* ... or has undefined overflow while the converted to
5631 type has not, we cannot do the operation in the inner type
5632 as that would introduce undefined overflow. */
5633 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5634 && !TYPE_OVERFLOW_UNDEFINED (type))))
5637 /* Pass the constant down and see if we can make a simplification. If
5638 we can, replace this expression with the inner simplification for
5639 possible later conversion to our or some other type. */
5640 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5641 && TREE_CODE (t2) == INTEGER_CST
5642 && !TREE_OVERFLOW (t2)
5643 && (0 != (t1 = extract_muldiv (op0, t2, code,
5645 ? ctype : NULL_TREE,
5646 strict_overflow_p))))
5651 /* If widening the type changes it from signed to unsigned, then we
5652 must avoid building ABS_EXPR itself as unsigned. */
5653 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5655 tree cstype = (*signed_type_for) (ctype);
5656 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5659 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5660 return fold_convert (ctype, t1);
5664 /* If the constant is negative, we cannot simplify this. */
5665 if (tree_int_cst_sgn (c) == -1)
5669 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5671 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5674 case MIN_EXPR: case MAX_EXPR:
5675 /* If widening the type changes the signedness, then we can't perform
5676 this optimization as that changes the result. */
5677 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5680 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5681 sub_strict_overflow_p = false;
5682 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5683 &sub_strict_overflow_p)) != 0
5684 && (t2 = extract_muldiv (op1, c, code, wide_type,
5685 &sub_strict_overflow_p)) != 0)
5687 if (tree_int_cst_sgn (c) < 0)
5688 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5689 if (sub_strict_overflow_p)
5690 *strict_overflow_p = true;
5691 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5692 fold_convert (ctype, t2));
5696 case LSHIFT_EXPR: case RSHIFT_EXPR:
5697 /* If the second operand is constant, this is a multiplication
5698 or floor division by a power of two, so we can treat it that
5699 way unless the multiplier or divisor overflows. Signed
5700 left-shift overflow is implementation-defined rather than
5701 undefined in C90, so do not convert signed left shift into
5703 if (TREE_CODE (op1) == INTEGER_CST
5704 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5705 /* const_binop may not detect overflow correctly,
5706 so check for it explicitly here. */
5707 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5708 && TREE_INT_CST_HIGH (op1) == 0
5709 && 0 != (t1 = fold_convert (ctype,
5710 const_binop (LSHIFT_EXPR,
5713 && !TREE_OVERFLOW (t1))
5714 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5715 ? MULT_EXPR : FLOOR_DIV_EXPR,
5717 fold_convert (ctype, op0),
5719 c, code, wide_type, strict_overflow_p);
5722 case PLUS_EXPR: case MINUS_EXPR:
5723 /* See if we can eliminate the operation on both sides. If we can, we
5724 can return a new PLUS or MINUS. If we can't, the only remaining
5725 cases where we can do anything are if the second operand is a
5727 sub_strict_overflow_p = false;
5728 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5729 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5730 if (t1 != 0 && t2 != 0
5731 && (code == MULT_EXPR
5732 /* If not multiplication, we can only do this if both operands
5733 are divisible by c. */
5734 || (multiple_of_p (ctype, op0, c)
5735 && multiple_of_p (ctype, op1, c))))
5737 if (sub_strict_overflow_p)
5738 *strict_overflow_p = true;
5739 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5740 fold_convert (ctype, t2));
5743 /* If this was a subtraction, negate OP1 and set it to be an addition.
5744 This simplifies the logic below. */
5745 if (tcode == MINUS_EXPR)
5747 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5748 /* If OP1 was not easily negatable, the constant may be OP0. */
5749 if (TREE_CODE (op0) == INTEGER_CST)
5760 if (TREE_CODE (op1) != INTEGER_CST)
5763 /* If either OP1 or C is negative, this optimization is not safe for
5764 some of the division and remainder types, while for others we need
5765 to change the code. */
5766 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5768 if (code == CEIL_DIV_EXPR)
5769 code = FLOOR_DIV_EXPR;
5770 else if (code == FLOOR_DIV_EXPR)
5771 code = CEIL_DIV_EXPR;
5772 else if (code != MULT_EXPR
5773 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5777 /* If it's a multiply or a division/modulus operation of a multiple
5778 of our constant, do the operation and verify it doesn't overflow. */
5779 if (code == MULT_EXPR
5780 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5782 op1 = const_binop (code, fold_convert (ctype, op1),
5783 fold_convert (ctype, c));
5784 /* We allow the constant to overflow with wrapping semantics. */
5786 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5792 /* If we have an unsigned type that is not a sizetype, we cannot widen
5793 the operation since it will change the result if the original
5794 computation overflowed. */
5795 if (TYPE_UNSIGNED (ctype)
5796 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5800 /* If we were able to eliminate our operation from the first side,
5801 apply our operation to the second side and reform the PLUS. */
5802 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5803 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5805 /* The last case is if we are a multiply. In that case, we can
5806 apply the distributive law to commute the multiply and addition
5807 if the multiplication of the constants doesn't overflow. */
5808 if (code == MULT_EXPR)
5809 return fold_build2 (tcode, ctype,
5810 fold_build2 (code, ctype,
5811 fold_convert (ctype, op0),
5812 fold_convert (ctype, c)),
5818 /* We have a special case here if we are doing something like
5819 (C * 8) % 4 since we know that's zero. */
5820 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5821 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5822 /* If the multiplication can overflow we cannot optimize this.
5823 ??? Until we can properly mark individual operations as
5824 not overflowing we need to treat sizetype specially here as
5825 stor-layout relies on this optimization to make
5826 DECL_FIELD_BIT_OFFSET always a constant. */
5827 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5828 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5829 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5830 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5831 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5833 *strict_overflow_p = true;
5834 return omit_one_operand (type, integer_zero_node, op0);
5837 /* ... fall through ... */
5839 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5840 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5841 /* If we can extract our operation from the LHS, do so and return a
5842 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5843 do something only if the second operand is a constant. */
5845 && (t1 = extract_muldiv (op0, c, code, wide_type,
5846 strict_overflow_p)) != 0)
5847 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5848 fold_convert (ctype, op1));
5849 else if (tcode == MULT_EXPR && code == MULT_EXPR
5850 && (t1 = extract_muldiv (op1, c, code, wide_type,
5851 strict_overflow_p)) != 0)
5852 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5853 fold_convert (ctype, t1));
5854 else if (TREE_CODE (op1) != INTEGER_CST)
5857 /* If these are the same operation types, we can associate them
5858 assuming no overflow. */
5863 mul = double_int_mul_with_sign
5865 (tree_to_double_int (op1),
5866 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5868 (tree_to_double_int (c),
5869 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5870 false, &overflow_p);
5871 overflow_p = (((!TYPE_UNSIGNED (ctype)
5872 || (TREE_CODE (ctype) == INTEGER_TYPE
5873 && TYPE_IS_SIZETYPE (ctype)))
5875 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5876 if (!double_int_fits_to_tree_p (ctype, mul)
5877 && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
5878 || !TYPE_UNSIGNED (ctype)
5879 || (TREE_CODE (ctype) == INTEGER_TYPE
5880 && TYPE_IS_SIZETYPE (ctype))))
5883 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5884 double_int_to_tree (ctype, mul));
5887 /* If these operations "cancel" each other, we have the main
5888 optimizations of this pass, which occur when either constant is a
5889 multiple of the other, in which case we replace this with an
5890 operation of either CODE or TCODE.
5892 If we have an unsigned type that is not a sizetype, we cannot do
5893 this since it will change the result if the original computation
5895 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5896 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5897 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5898 || (tcode == MULT_EXPR
5899 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5900 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5901 && code != MULT_EXPR)))
5903 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5905 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5906 *strict_overflow_p = true;
5907 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5908 fold_convert (ctype,
5909 const_binop (TRUNC_DIV_EXPR,
5912 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5914 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5915 *strict_overflow_p = true;
5916 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5917 fold_convert (ctype,
5918 const_binop (TRUNC_DIV_EXPR,
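/* Illustrative sketch, not part of GCC: the kind of rewrite extract_muldiv
   performs, shown on plain longs.  It is valid only when the original
   expression cannot overflow, or when overflow in its type is undefined and
   may therefore be assumed not to happen.  The name is hypothetical.  */
#if 0
static long
extract_muldiv_demo (long x, long y)
{
  /* (x * 8 + y * 16) / 4  becomes  x * 2 + y * 4.  */
  return x * 2 + y * 4;
}
#endif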
5931 /* Return a node which has the indicated constant VALUE (either 0 or
5932 1), and is of the indicated TYPE. */
5935 constant_boolean_node (int value, tree type)
5937 if (type == integer_type_node)
5938 return value ? integer_one_node : integer_zero_node;
5939 else if (type == boolean_type_node)
5940 return value ? boolean_true_node : boolean_false_node;
5942 return build_int_cst (type, value);
5946 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5947 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5948 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5949 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5950 COND is the first argument to CODE; otherwise (as in the example
5951 given here), it is the second argument. TYPE is the type of the
5952 original expression. Return NULL_TREE if no simplification is
5956 fold_binary_op_with_conditional_arg (location_t loc,
5957 enum tree_code code,
5958 tree type, tree op0, tree op1,
5959 tree cond, tree arg, int cond_first_p)
5961 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5962 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5963 tree test, true_value, false_value;
5964 tree lhs = NULL_TREE;
5965 tree rhs = NULL_TREE;
5967 if (TREE_CODE (cond) == COND_EXPR)
5969 test = TREE_OPERAND (cond, 0);
5970 true_value = TREE_OPERAND (cond, 1);
5971 false_value = TREE_OPERAND (cond, 2);
5972 /* If this operand throws an exception, then it does not make
5973 sense to try to perform a logical or arithmetic operation
5975 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5977 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5982 tree testtype = TREE_TYPE (cond);
5984 true_value = constant_boolean_node (true, testtype);
5985 false_value = constant_boolean_node (false, testtype);
5988 /* This transformation is only worthwhile if we don't have to wrap ARG
5989 in a SAVE_EXPR and the operation can be simplified on at least one
5990 of the branches once it is pushed inside the COND_EXPR. */
5991 if (!TREE_CONSTANT (arg)
5992 && (TREE_SIDE_EFFECTS (arg)
5993 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
5996 arg = fold_convert_loc (loc, arg_type, arg);
5999 true_value = fold_convert_loc (loc, cond_type, true_value);
6001 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6003 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6007 false_value = fold_convert_loc (loc, cond_type, false_value);
6009 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6011 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6014 /* Check that we have simplified at least one of the branches. */
6015 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6018 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
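/* Illustrative sketch, not part of GCC: the distribution performed above.
   The names are hypothetical.  */
#if 0
static int
distribute_over_cond_demo (int a, int b, int x, int y)
{
  /* "a + (b ? x : y)" becomes "b ? (a + x) : (a + y)"; this pays off when
     one of the new arms then folds, e.g. when x or y is a constant.  */
  return b ? (a + x) : (a + y);
}
#endif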
6022 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6024 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6025 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6026 ADDEND is the same as X.
6028 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6029 and finite. The problematic cases are when X is zero, and its mode
6030 has signed zeros. In the case of rounding towards -infinity,
6031 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6032 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6035 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6037 if (!real_zerop (addend))
6040 /* Don't allow the fold with -fsignaling-nans. */
6041 if (HONOR_SNANS (TYPE_MODE (type)))
6044 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6045 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6048 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6049 if (TREE_CODE (addend) == REAL_CST
6050 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6053 /* The mode has signed zeros, and we have to honor their sign.
6054 In this situation, there is only one case we can return true for.
6055 X - 0 is the same as X unless rounding towards -infinity is
6057 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
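/* Illustrative sketch, not part of GCC: why the signed-zero test above is
   needed.  The name is hypothetical.  */
#if 0
static double
zero_addition_demo (double x)
{
  /* Not equivalent to "return x;" once signed zeros are honored: with
     x == -0.0 and the default rounding mode, -0.0 + 0.0 is +0.0, so folding
     "x + 0.0" to "x" would flip the sign of a zero result.  "x - 0.0" is
     safe except when rounding towards -infinity, where 0.0 - 0.0 yields
     -0.0.  */
  return x + 0.0;
}
#endif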
6060 /* Subroutine of fold() that checks comparisons of built-in math
6061 functions against real constants.
6063 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6064 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6065 is the type of the result and ARG0 and ARG1 are the operands of the
6066 comparison. ARG1 must be a TREE_REAL_CST.
6068 The function returns the constant folded tree if a simplification
6069 can be made, and NULL_TREE otherwise. */
6072 fold_mathfn_compare (location_t loc,
6073 enum built_in_function fcode, enum tree_code code,
6074 tree type, tree arg0, tree arg1)
6078 if (BUILTIN_SQRT_P (fcode))
6080 tree arg = CALL_EXPR_ARG (arg0, 0);
6081 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6083 c = TREE_REAL_CST (arg1);
6084 if (REAL_VALUE_NEGATIVE (c))
6086 /* sqrt(x) < y is always false, if y is negative. */
6087 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6088 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6090 /* sqrt(x) > y is always true, if y is negative and we
6091 don't care about NaNs, i.e. negative values of x. */
6092 if (code == NE_EXPR || !HONOR_NANS (mode))
6093 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6095 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6096 return fold_build2_loc (loc, GE_EXPR, type, arg,
6097 build_real (TREE_TYPE (arg), dconst0));
6099 else if (code == GT_EXPR || code == GE_EXPR)
6103 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6104 real_convert (&c2, mode, &c2);
6106 if (REAL_VALUE_ISINF (c2))
6108 /* sqrt(x) > y is x == +Inf, when y is very large. */
6109 if (HONOR_INFINITIES (mode))
6110 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6111 build_real (TREE_TYPE (arg), c2));
6113 /* sqrt(x) > y is always false, when y is very large
6114 and we don't care about infinities. */
6115 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6118 /* sqrt(x) > c is the same as x > c*c. */
6119 return fold_build2_loc (loc, code, type, arg,
6120 build_real (TREE_TYPE (arg), c2));
6122 else if (code == LT_EXPR || code == LE_EXPR)
6126 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6127 real_convert (&c2, mode, &c2);
6129 if (REAL_VALUE_ISINF (c2))
6131 /* sqrt(x) < y is always true, when y is a very large
6132 value and we don't care about NaNs or Infinities. */
6133 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6134 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6136 /* sqrt(x) < y is x != +Inf when y is very large and we
6137 don't care about NaNs. */
6138 if (! HONOR_NANS (mode))
6139 return fold_build2_loc (loc, NE_EXPR, type, arg,
6140 build_real (TREE_TYPE (arg), c2));
6142 /* sqrt(x) < y is x >= 0 when y is very large and we
6143 don't care about Infinities. */
6144 if (! HONOR_INFINITIES (mode))
6145 return fold_build2_loc (loc, GE_EXPR, type, arg,
6146 build_real (TREE_TYPE (arg), dconst0));
6148 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6149 arg = save_expr (arg);
6150 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6151 fold_build2_loc (loc, GE_EXPR, type, arg,
6152 build_real (TREE_TYPE (arg),
6154 fold_build2_loc (loc, NE_EXPR, type, arg,
6155 build_real (TREE_TYPE (arg),
6159 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6160 if (! HONOR_NANS (mode))
6161 return fold_build2_loc (loc, code, type, arg,
6162 build_real (TREE_TYPE (arg), c2));
6164 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6165 arg = save_expr (arg);
6166 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6167 fold_build2_loc (loc, GE_EXPR, type, arg,
6168 build_real (TREE_TYPE (arg),
6170 fold_build2_loc (loc, code, type, arg,
6171 build_real (TREE_TYPE (arg),
6179 /* Subroutine of fold() that optimizes comparisons against Infinities,
6180 either +Inf or -Inf.
6182 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6183 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6184 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6186 The function returns the constant folded tree if a simplification
6187 can be made, and NULL_TREE otherwise. */
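/* Illustrative examples for a double operand: x >= +Inf becomes
   x > DBL_MAX, x < +Inf becomes x <= DBL_MAX, and x != +Inf becomes
   x <= DBL_MAX when NaNs can be ignored and !(x > DBL_MAX) otherwise.  */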
6190 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6191 tree arg0, tree arg1)
6193 enum machine_mode mode;
6194 REAL_VALUE_TYPE max;
6198 mode = TYPE_MODE (TREE_TYPE (arg0));
6200 /* For negative infinity swap the sense of the comparison. */
6201 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6203 code = swap_tree_comparison (code);
6208 /* x > +Inf is always false, if we ignore sNaNs. */
6209 if (HONOR_SNANS (mode))
6211 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6214 /* x <= +Inf is always true, if we don't care about NaNs. */
6215 if (! HONOR_NANS (mode))
6216 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6218 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6219 arg0 = save_expr (arg0);
6220 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6224 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6225 real_maxval (&max, neg, mode);
6226 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6227 arg0, build_real (TREE_TYPE (arg0), max));
6230 /* x < +Inf is always equal to x <= DBL_MAX. */
6231 real_maxval (&max, neg, mode);
6232 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6233 arg0, build_real (TREE_TYPE (arg0), max));
6236 /* x != +Inf is always equal to !(x > DBL_MAX). */
6237 real_maxval (&max, neg, mode);
6238 if (! HONOR_NANS (mode))
6239 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6240 arg0, build_real (TREE_TYPE (arg0), max));
6242 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6243 arg0, build_real (TREE_TYPE (arg0), max));
6244 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6253 /* Subroutine of fold() that optimizes comparisons of a division by
6254 a nonzero integer constant against an integer constant, i.e. X/C1 op C2.
6257 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6258 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6259 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6261 The function returns the constant folded tree if a simplification
6262 can be made, and NULL_TREE otherwise. */
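/* Illustrative example: with signed, truncating division, x / 4 == 3 holds
   exactly for x in [12, 15], so the comparison is rewritten as the range
   check 12 <= x && x <= 15 (subject to the overflow handling below).  */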
6265 fold_div_compare (location_t loc,
6266 enum tree_code code, tree type, tree arg0, tree arg1)
6268 tree prod, tmp, hi, lo;
6269 tree arg00 = TREE_OPERAND (arg0, 0);
6270 tree arg01 = TREE_OPERAND (arg0, 1);
6272 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6276 /* We have to do this the hard way to detect unsigned overflow.
6277 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6278 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6279 TREE_INT_CST_HIGH (arg01),
6280 TREE_INT_CST_LOW (arg1),
6281 TREE_INT_CST_HIGH (arg1),
6282 &val.low, &val.high, unsigned_p);
6283 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6284 neg_overflow = false;
6288 tmp = int_const_binop (MINUS_EXPR, arg01,
6289 build_int_cst (TREE_TYPE (arg01), 1));
6292 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6293 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6294 TREE_INT_CST_HIGH (prod),
6295 TREE_INT_CST_LOW (tmp),
6296 TREE_INT_CST_HIGH (tmp),
6297 &val.low, &val.high, unsigned_p);
6298 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6299 -1, overflow | TREE_OVERFLOW (prod));
6301 else if (tree_int_cst_sgn (arg01) >= 0)
6303 tmp = int_const_binop (MINUS_EXPR, arg01,
6304 build_int_cst (TREE_TYPE (arg01), 1));
6305 switch (tree_int_cst_sgn (arg1))
6308 neg_overflow = true;
6309 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6314 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6319 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6329 /* A negative divisor reverses the relational operators. */
6330 code = swap_tree_comparison (code);
6332 tmp = int_const_binop (PLUS_EXPR, arg01,
6333 build_int_cst (TREE_TYPE (arg01), 1));
6334 switch (tree_int_cst_sgn (arg1))
6337 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6342 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6347 neg_overflow = true;
6348 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6360 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6361 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6362 if (TREE_OVERFLOW (hi))
6363 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6364 if (TREE_OVERFLOW (lo))
6365 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6366 return build_range_check (loc, type, arg00, 1, lo, hi);
6369 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6370 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6371 if (TREE_OVERFLOW (hi))
6372 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6373 if (TREE_OVERFLOW (lo))
6374 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6375 return build_range_check (loc, type, arg00, 0, lo, hi);
6378 if (TREE_OVERFLOW (lo))
6380 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6381 return omit_one_operand_loc (loc, type, tmp, arg00);
6383 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6386 if (TREE_OVERFLOW (hi))
6388 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6389 return omit_one_operand_loc (loc, type, tmp, arg00);
6391 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6394 if (TREE_OVERFLOW (hi))
6396 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6397 return omit_one_operand_loc (loc, type, tmp, arg00);
6399 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6402 if (TREE_OVERFLOW (lo))
6404 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6405 return omit_one_operand_loc (loc, type, tmp, arg00);
6407 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6417 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6418 equality/inequality test, then return a simplified form of the test
6419 using a sign test. Otherwise return NULL. TYPE is the desired result type. */
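/* Illustrative example: if x has a 32-bit type and C is 1 << 31, then
   (x & C) != 0 becomes (signed) x < 0 and (x & C) == 0 becomes
   (signed) x >= 0, provided the signed cast is cheap as checked below.  */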
6423 fold_single_bit_test_into_sign_test (location_t loc,
6424 enum tree_code code, tree arg0, tree arg1,
6427 /* If this is testing a single bit, we can optimize the test. */
6428 if ((code == NE_EXPR || code == EQ_EXPR)
6429 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6430 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6432 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6433 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6434 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6436 if (arg00 != NULL_TREE
6437 /* This is only a win if casting to a signed type is cheap,
6438 i.e. when arg00's type is not a partial mode. */
6439 && TYPE_PRECISION (TREE_TYPE (arg00))
6440 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6442 tree stype = signed_type_for (TREE_TYPE (arg00));
6443 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6445 fold_convert_loc (loc, stype, arg00),
6446 build_int_cst (stype, 0));
6453 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6454 equality/inequality test, then return a simplified form of
6455 the test using shifts and logical operations. Otherwise return
6456 NULL. TYPE is the desired result type. */
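/* Illustrative example: (x & 4) != 0 becomes ((x >> 2) & 1), and
   (x & 4) == 0 becomes (((x >> 2) ^ 1) & 1); when the mask is the sign bit
   of x's type the sign test form above is used instead.  */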
6459 fold_single_bit_test (location_t loc, enum tree_code code,
6460 tree arg0, tree arg1, tree result_type)
6462 /* If this is testing a single bit, we can optimize the test. */
6463 if ((code == NE_EXPR || code == EQ_EXPR)
6464 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6465 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6467 tree inner = TREE_OPERAND (arg0, 0);
6468 tree type = TREE_TYPE (arg0);
6469 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6470 enum machine_mode operand_mode = TYPE_MODE (type);
6472 tree signed_type, unsigned_type, intermediate_type;
6475 /* First, see if we can fold the single bit test into a sign-bit test. */
6477 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6482 /* Otherwise we have (A & C) != 0 where C is a single bit,
6483 convert that into ((A >> C2) & 1), where C2 = log2(C).
6484 Similarly for (A & C) == 0. */
6486 /* If INNER is a right shift of a constant and it plus BITNUM does
6487 not overflow, adjust BITNUM and INNER. */
6488 if (TREE_CODE (inner) == RSHIFT_EXPR
6489 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6490 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6491 && bitnum < TYPE_PRECISION (type)
6492 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6493 bitnum - TYPE_PRECISION (type)))
6495 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6496 inner = TREE_OPERAND (inner, 0);
6499 /* If we are going to be able to omit the AND below, we must do our
6500 operations as unsigned. If we must use the AND, we have a choice.
6501 Normally unsigned is faster, but for some machines signed is. */
6502 #ifdef LOAD_EXTEND_OP
6503 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6504 && !flag_syntax_only) ? 0 : 1;
6509 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6510 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6511 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6512 inner = fold_convert_loc (loc, intermediate_type, inner);
6515 inner = build2 (RSHIFT_EXPR, intermediate_type,
6516 inner, size_int (bitnum));
6518 one = build_int_cst (intermediate_type, 1);
6520 if (code == EQ_EXPR)
6521 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6523 /* Put the AND last so it can combine with more things. */
6524 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6526 /* Make sure to return the proper type. */
6527 inner = fold_convert_loc (loc, result_type, inner);
6534 /* Check whether we are allowed to reorder operands arg0 and arg1,
6535 such that the evaluation of arg1 occurs before arg0. */
6538 reorder_operands_p (const_tree arg0, const_tree arg1)
6540 if (! flag_evaluation_order)
6542 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6544 return ! TREE_SIDE_EFFECTS (arg0)
6545 && ! TREE_SIDE_EFFECTS (arg1);
6548 /* Test whether it is preferable to swap two operands, ARG0 and
6549 ARG1, for example because ARG0 is an integer constant and ARG1
6550 isn't. If REORDER is true, only recommend swapping if we can
6551 evaluate the operands in reverse order. */
6554 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6556 STRIP_SIGN_NOPS (arg0);
6557 STRIP_SIGN_NOPS (arg1);
6559 if (TREE_CODE (arg1) == INTEGER_CST)
6561 if (TREE_CODE (arg0) == INTEGER_CST)
6564 if (TREE_CODE (arg1) == REAL_CST)
6566 if (TREE_CODE (arg0) == REAL_CST)
6569 if (TREE_CODE (arg1) == FIXED_CST)
6571 if (TREE_CODE (arg0) == FIXED_CST)
6574 if (TREE_CODE (arg1) == COMPLEX_CST)
6576 if (TREE_CODE (arg0) == COMPLEX_CST)
6579 if (TREE_CONSTANT (arg1))
6581 if (TREE_CONSTANT (arg0))
6584 if (optimize_function_for_size_p (cfun))
6587 if (reorder && flag_evaluation_order
6588 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6591 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6592 for commutative and comparison operators. Ensuring a canonical
6593 form allows the optimizers to find additional redundancies without
6594 having to explicitly check for both orderings. */
6595 if (TREE_CODE (arg0) == SSA_NAME
6596 && TREE_CODE (arg1) == SSA_NAME
6597 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6600 /* Put SSA_NAMEs last. */
6601 if (TREE_CODE (arg1) == SSA_NAME)
6603 if (TREE_CODE (arg0) == SSA_NAME)
6606 /* Put variables last. */
6615 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6616 ARG0 is extended to a wider type. */
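/* Illustrative example: if c has type unsigned char, (int) c == 70000 is
   known to be false because 70000 does not fit in unsigned char, while
   (int) c == 65 can be done as the narrower comparison c == 65.  */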
6619 fold_widened_comparison (location_t loc, enum tree_code code,
6620 tree type, tree arg0, tree arg1)
6622 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6624 tree shorter_type, outer_type;
6628 if (arg0_unw == arg0)
6630 shorter_type = TREE_TYPE (arg0_unw);
6632 #ifdef HAVE_canonicalize_funcptr_for_compare
6633 /* Disable this optimization if we're casting a function pointer
6634 type on targets that require function pointer canonicalization. */
6635 if (HAVE_canonicalize_funcptr_for_compare
6636 && TREE_CODE (shorter_type) == POINTER_TYPE
6637 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6641 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6644 arg1_unw = get_unwidened (arg1, NULL_TREE);
6646 /* If possible, express the comparison in the shorter mode. */
6647 if ((code == EQ_EXPR || code == NE_EXPR
6648 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6649 && (TREE_TYPE (arg1_unw) == shorter_type
6650 || ((TYPE_PRECISION (shorter_type)
6651 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6652 && (TYPE_UNSIGNED (shorter_type)
6653 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6654 || (TREE_CODE (arg1_unw) == INTEGER_CST
6655 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6656 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6657 && int_fits_type_p (arg1_unw, shorter_type))))
6658 return fold_build2_loc (loc, code, type, arg0_unw,
6659 fold_convert_loc (loc, shorter_type, arg1_unw));
6661 if (TREE_CODE (arg1_unw) != INTEGER_CST
6662 || TREE_CODE (shorter_type) != INTEGER_TYPE
6663 || !int_fits_type_p (arg1_unw, shorter_type))
6666 /* If we are comparing with an integer that does not fit into the range
6667 of the shorter type, the result is known. */
6668 outer_type = TREE_TYPE (arg1_unw);
6669 min = lower_bound_in_type (outer_type, shorter_type);
6670 max = upper_bound_in_type (outer_type, shorter_type);
6672 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6674 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6681 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6686 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6692 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6694 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6699 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6701 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6710 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6711 ARG0 just the signedness is changed. */
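/* Illustrative example: if u has type unsigned int (same precision as int),
   (int) u == 5 is rewritten as u == 5U on the inner operand; for relational
   codes the fold is only done when the signedness does not change.  */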
6714 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6715 tree arg0, tree arg1)
6718 tree inner_type, outer_type;
6720 if (!CONVERT_EXPR_P (arg0))
6723 outer_type = TREE_TYPE (arg0);
6724 arg0_inner = TREE_OPERAND (arg0, 0);
6725 inner_type = TREE_TYPE (arg0_inner);
6727 #ifdef HAVE_canonicalize_funcptr_for_compare
6728 /* Disable this optimization if we're casting a function pointer
6729 type on targets that require function pointer canonicalization. */
6730 if (HAVE_canonicalize_funcptr_for_compare
6731 && TREE_CODE (inner_type) == POINTER_TYPE
6732 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6736 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6739 if (TREE_CODE (arg1) != INTEGER_CST
6740 && !(CONVERT_EXPR_P (arg1)
6741 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6744 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6745 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6750 if (TREE_CODE (arg1) == INTEGER_CST)
6751 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6752 0, TREE_OVERFLOW (arg1));
6754 arg1 = fold_convert_loc (loc, inner_type, arg1);
6756 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6759 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6760 the step of the array. Reconstructs s and delta in the case of s *
6761 delta being an integer constant (and thus already folded). ADDR is
6762 the address. MULT is the multiplicative expression. If the
6763 function succeeds, the new address expression is returned.
6764 Otherwise NULL_TREE is returned. LOC is the location of the
6765 resulting expression. */
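/* Illustrative example: for an array A of 4-byte elements, the address
   &A[i] p+ 4 * d is rewritten as &A[i + d], because 4 is the step of the
   array.  */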
6768 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6770 tree s, delta, step;
6771 tree ref = TREE_OPERAND (addr, 0), pref;
6776 /* Strip the nops that might be added when converting op1 to sizetype. */
6779 /* Canonicalize op1 into a possibly non-constant delta
6780 and an INTEGER_CST s. */
6781 if (TREE_CODE (op1) == MULT_EXPR)
6783 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6788 if (TREE_CODE (arg0) == INTEGER_CST)
6793 else if (TREE_CODE (arg1) == INTEGER_CST)
6801 else if (TREE_CODE (op1) == INTEGER_CST)
6808 /* Treat op1 as delta * 1. */
6810 s = integer_one_node;
6813 for (;; ref = TREE_OPERAND (ref, 0))
6815 if (TREE_CODE (ref) == ARRAY_REF)
6819 /* Remember if this was a multi-dimensional array. */
6820 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6823 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6826 itype = TREE_TYPE (domain);
6828 step = array_ref_element_size (ref);
6829 if (TREE_CODE (step) != INTEGER_CST)
6834 if (! tree_int_cst_equal (step, s))
6839 /* Check whether delta is a multiple of step. */
6840 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6846 /* Only fold here if we can verify we do not overflow one
6847 dimension of a multi-dimensional array. */
6852 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6853 || !TYPE_MAX_VALUE (domain)
6854 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6857 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6858 fold_convert_loc (loc, itype,
6859 TREE_OPERAND (ref, 1)),
6860 fold_convert_loc (loc, itype, delta));
6862 || TREE_CODE (tmp) != INTEGER_CST
6863 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6872 if (!handled_component_p (ref))
6876 /* We found a suitable array reference. Copy everything up to it,
6877 and replace the index. */
6879 pref = TREE_OPERAND (addr, 0);
6880 ret = copy_node (pref);
6881 SET_EXPR_LOCATION (ret, loc);
6886 pref = TREE_OPERAND (pref, 0);
6887 TREE_OPERAND (pos, 0) = copy_node (pref);
6888 pos = TREE_OPERAND (pos, 0);
6891 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6892 fold_convert_loc (loc, itype,
6893 TREE_OPERAND (pos, 1)),
6894 fold_convert_loc (loc, itype, delta));
6896 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6900 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6901 means A >= Y && A != MAX, but in this case we know that
6902 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
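/* Illustrative example: given BOUND i < n and INEQ i + 1 > j, the result is
   i >= j, which is what i + 1 > j simplifies to when i + 1 cannot wrap
   around (guaranteed here because i < n <= MAX).  */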
6905 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6907 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6909 if (TREE_CODE (bound) == LT_EXPR)
6910 a = TREE_OPERAND (bound, 0);
6911 else if (TREE_CODE (bound) == GT_EXPR)
6912 a = TREE_OPERAND (bound, 1);
6916 typea = TREE_TYPE (a);
6917 if (!INTEGRAL_TYPE_P (typea)
6918 && !POINTER_TYPE_P (typea))
6921 if (TREE_CODE (ineq) == LT_EXPR)
6923 a1 = TREE_OPERAND (ineq, 1);
6924 y = TREE_OPERAND (ineq, 0);
6926 else if (TREE_CODE (ineq) == GT_EXPR)
6928 a1 = TREE_OPERAND (ineq, 0);
6929 y = TREE_OPERAND (ineq, 1);
6934 if (TREE_TYPE (a1) != typea)
6937 if (POINTER_TYPE_P (typea))
6939 /* Convert the pointers to integers before taking the difference. */
6940 tree ta = fold_convert_loc (loc, ssizetype, a);
6941 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6942 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6945 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6947 if (!diff || !integer_onep (diff))
6950 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6953 /* Fold a sum or difference of at least one multiplication.
6954 Returns the folded tree or NULL if no simplification could be made. */
6957 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6958 tree arg0, tree arg1)
6960 tree arg00, arg01, arg10, arg11;
6961 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6963 /* (A * C) +- (B * C) -> (A+-B) * C.
6964 (A * C) +- A -> A * (C+-1).
6965 We are most concerned about the case where C is a constant,
6966 but other combinations show up during loop reduction. Since
6967 it is not difficult, try all four possibilities. */
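/* Illustrative examples: a * 4 + b * 4 becomes (a + b) * 4, x * 8 - x
   becomes x * 7, and the common power-of-two case below turns
   i * 12 + j * 4 into (i * 3 + j) * 4.  */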
6969 if (TREE_CODE (arg0) == MULT_EXPR)
6971 arg00 = TREE_OPERAND (arg0, 0);
6972 arg01 = TREE_OPERAND (arg0, 1);
6974 else if (TREE_CODE (arg0) == INTEGER_CST)
6976 arg00 = build_one_cst (type);
6981 /* We cannot generate constant 1 for fract. */
6982 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6985 arg01 = build_one_cst (type);
6987 if (TREE_CODE (arg1) == MULT_EXPR)
6989 arg10 = TREE_OPERAND (arg1, 0);
6990 arg11 = TREE_OPERAND (arg1, 1);
6992 else if (TREE_CODE (arg1) == INTEGER_CST)
6994 arg10 = build_one_cst (type);
6995 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6996 the purpose of this canonicalization. */
6997 if (TREE_INT_CST_HIGH (arg1) == -1
6998 && negate_expr_p (arg1)
6999 && code == PLUS_EXPR)
7001 arg11 = negate_expr (arg1);
7009 /* We cannot generate constant 1 for fract. */
7010 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7013 arg11 = build_one_cst (type);
7017 if (operand_equal_p (arg01, arg11, 0))
7018 same = arg01, alt0 = arg00, alt1 = arg10;
7019 else if (operand_equal_p (arg00, arg10, 0))
7020 same = arg00, alt0 = arg01, alt1 = arg11;
7021 else if (operand_equal_p (arg00, arg11, 0))
7022 same = arg00, alt0 = arg01, alt1 = arg10;
7023 else if (operand_equal_p (arg01, arg10, 0))
7024 same = arg01, alt0 = arg00, alt1 = arg11;
7026 /* No identical multiplicands; see if we can find a common
7027 power-of-two factor in non-power-of-two multiplies. This
7028 can help in multi-dimensional array access. */
7029 else if (host_integerp (arg01, 0)
7030 && host_integerp (arg11, 0))
7032 HOST_WIDE_INT int01, int11, tmp;
7035 int01 = TREE_INT_CST_LOW (arg01);
7036 int11 = TREE_INT_CST_LOW (arg11);
7038 /* Move min of absolute values to int11. */
7039 if (abs_hwi (int01) < abs_hwi (int11))
7041 tmp = int01, int01 = int11, int11 = tmp;
7042 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7049 if (exact_log2 (abs_hwi (int11)) > 0 && int01 % int11 == 0
7050 /* The remainder should not be a constant, otherwise we
7051 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7052 increase the number of multiplications needed. */
7053 && TREE_CODE (arg10) != INTEGER_CST)
7055 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7056 build_int_cst (TREE_TYPE (arg00),
7061 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7066 return fold_build2_loc (loc, MULT_EXPR, type,
7067 fold_build2_loc (loc, code, type,
7068 fold_convert_loc (loc, type, alt0),
7069 fold_convert_loc (loc, type, alt1)),
7070 fold_convert_loc (loc, type, same));
7075 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7076 specified by EXPR into the buffer PTR of length LEN bytes.
7077 Return the number of bytes placed in the buffer, or zero upon failure. */
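/* Illustrative example: on a typical little-endian target the 32-bit
   INTEGER_CST 0x11223344 is encoded as the bytes 44 33 22 11, and on a
   big-endian target as 11 22 33 44.  */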
7081 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7083 tree type = TREE_TYPE (expr);
7084 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7085 int byte, offset, word, words;
7086 unsigned char value;
7088 if (total_bytes > len)
7090 words = total_bytes / UNITS_PER_WORD;
7092 for (byte = 0; byte < total_bytes; byte++)
7094 int bitpos = byte * BITS_PER_UNIT;
7095 if (bitpos < HOST_BITS_PER_WIDE_INT)
7096 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7098 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7099 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7101 if (total_bytes > UNITS_PER_WORD)
7103 word = byte / UNITS_PER_WORD;
7104 if (WORDS_BIG_ENDIAN)
7105 word = (words - 1) - word;
7106 offset = word * UNITS_PER_WORD;
7107 if (BYTES_BIG_ENDIAN)
7108 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7110 offset += byte % UNITS_PER_WORD;
7113 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7114 ptr[offset] = value;
7120 /* Subroutine of native_encode_expr. Encode the REAL_CST
7121 specified by EXPR into the buffer PTR of length LEN bytes.
7122 Return the number of bytes placed in the buffer, or zero upon failure. */
7126 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7128 tree type = TREE_TYPE (expr);
7129 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7130 int byte, offset, word, words, bitpos;
7131 unsigned char value;
7133 /* There are always 32 bits in each long, no matter the size of
7134 the host's long. We handle floating point representations with up to 192 bits. */
7138 if (total_bytes > len)
7140 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7142 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7144 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7145 bitpos += BITS_PER_UNIT)
7147 byte = (bitpos / BITS_PER_UNIT) & 3;
7148 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7150 if (UNITS_PER_WORD < 4)
7152 word = byte / UNITS_PER_WORD;
7153 if (WORDS_BIG_ENDIAN)
7154 word = (words - 1) - word;
7155 offset = word * UNITS_PER_WORD;
7156 if (BYTES_BIG_ENDIAN)
7157 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7159 offset += byte % UNITS_PER_WORD;
7162 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7163 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7168 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7169 specified by EXPR into the buffer PTR of length LEN bytes.
7170 Return the number of bytes placed in the buffer, or zero upon failure. */
7174 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7179 part = TREE_REALPART (expr);
7180 rsize = native_encode_expr (part, ptr, len);
7183 part = TREE_IMAGPART (expr);
7184 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7187 return rsize + isize;
7191 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7192 specified by EXPR into the buffer PTR of length LEN bytes.
7193 Return the number of bytes placed in the buffer, or zero upon failure. */
7197 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7199 int i, size, offset, count;
7200 tree itype, elem, elements;
7203 elements = TREE_VECTOR_CST_ELTS (expr);
7204 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7205 itype = TREE_TYPE (TREE_TYPE (expr));
7206 size = GET_MODE_SIZE (TYPE_MODE (itype));
7207 for (i = 0; i < count; i++)
7211 elem = TREE_VALUE (elements);
7212 elements = TREE_CHAIN (elements);
7219 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7224 if (offset + size > len)
7226 memset (ptr+offset, 0, size);
7234 /* Subroutine of native_encode_expr. Encode the STRING_CST
7235 specified by EXPR into the buffer PTR of length LEN bytes.
7236 Return the number of bytes placed in the buffer, or zero upon failure. */
7240 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7242 tree type = TREE_TYPE (expr);
7243 HOST_WIDE_INT total_bytes;
7245 if (TREE_CODE (type) != ARRAY_TYPE
7246 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7247 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7248 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7250 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7251 if (total_bytes > len)
7253 if (TREE_STRING_LENGTH (expr) < total_bytes)
7255 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7256 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7257 total_bytes - TREE_STRING_LENGTH (expr));
7260 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7265 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7266 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7267 buffer PTR of length LEN bytes. Return the number of bytes
7268 placed in the buffer, or zero upon failure. */
7271 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7273 switch (TREE_CODE (expr))
7276 return native_encode_int (expr, ptr, len);
7279 return native_encode_real (expr, ptr, len);
7282 return native_encode_complex (expr, ptr, len);
7285 return native_encode_vector (expr, ptr, len);
7288 return native_encode_string (expr, ptr, len);
7296 /* Subroutine of native_interpret_expr. Interpret the contents of
7297 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7298 If the buffer cannot be interpreted, return NULL_TREE. */
7301 native_interpret_int (tree type, const unsigned char *ptr, int len)
7303 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7304 int byte, offset, word, words;
7305 unsigned char value;
7308 if (total_bytes > len)
7310 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7313 result = double_int_zero;
7314 words = total_bytes / UNITS_PER_WORD;
7316 for (byte = 0; byte < total_bytes; byte++)
7318 int bitpos = byte * BITS_PER_UNIT;
7319 if (total_bytes > UNITS_PER_WORD)
7321 word = byte / UNITS_PER_WORD;
7322 if (WORDS_BIG_ENDIAN)
7323 word = (words - 1) - word;
7324 offset = word * UNITS_PER_WORD;
7325 if (BYTES_BIG_ENDIAN)
7326 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7328 offset += byte % UNITS_PER_WORD;
7331 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7332 value = ptr[offset];
7334 if (bitpos < HOST_BITS_PER_WIDE_INT)
7335 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7337 result.high |= (unsigned HOST_WIDE_INT) value
7338 << (bitpos - HOST_BITS_PER_WIDE_INT);
7341 return double_int_to_tree (type, result);
7345 /* Subroutine of native_interpret_expr. Interpret the contents of
7346 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7347 If the buffer cannot be interpreted, return NULL_TREE. */
7350 native_interpret_real (tree type, const unsigned char *ptr, int len)
7352 enum machine_mode mode = TYPE_MODE (type);
7353 int total_bytes = GET_MODE_SIZE (mode);
7354 int byte, offset, word, words, bitpos;
7355 unsigned char value;
7356 /* There are always 32 bits in each long, no matter the size of
7357 the host's long. We handle floating point representations with up to 192 bits. */
7362 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7363 if (total_bytes > len || total_bytes > 24)
7365 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7367 memset (tmp, 0, sizeof (tmp));
7368 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7369 bitpos += BITS_PER_UNIT)
7371 byte = (bitpos / BITS_PER_UNIT) & 3;
7372 if (UNITS_PER_WORD < 4)
7374 word = byte / UNITS_PER_WORD;
7375 if (WORDS_BIG_ENDIAN)
7376 word = (words - 1) - word;
7377 offset = word * UNITS_PER_WORD;
7378 if (BYTES_BIG_ENDIAN)
7379 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7381 offset += byte % UNITS_PER_WORD;
7384 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7385 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7387 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7390 real_from_target (&r, tmp, mode);
7391 return build_real (type, r);
7395 /* Subroutine of native_interpret_expr. Interpret the contents of
7396 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7397 If the buffer cannot be interpreted, return NULL_TREE. */
7400 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7402 tree etype, rpart, ipart;
7405 etype = TREE_TYPE (type);
7406 size = GET_MODE_SIZE (TYPE_MODE (etype));
7409 rpart = native_interpret_expr (etype, ptr, size);
7412 ipart = native_interpret_expr (etype, ptr+size, size);
7415 return build_complex (type, rpart, ipart);
7419 /* Subroutine of native_interpret_expr. Interpret the contents of
7420 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7421 If the buffer cannot be interpreted, return NULL_TREE. */
7424 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7426 tree etype, elem, elements;
7429 etype = TREE_TYPE (type);
7430 size = GET_MODE_SIZE (TYPE_MODE (etype));
7431 count = TYPE_VECTOR_SUBPARTS (type);
7432 if (size * count > len)
7435 elements = NULL_TREE;
7436 for (i = count - 1; i >= 0; i--)
7438 elem = native_interpret_expr (etype, ptr+(i*size), size);
7441 elements = tree_cons (NULL_TREE, elem, elements);
7443 return build_vector (type, elements);
7447 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7448 the buffer PTR of length LEN as a constant of type TYPE. For
7449 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7450 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7451 return NULL_TREE. */
7454 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7456 switch (TREE_CODE (type))
7461 return native_interpret_int (type, ptr, len);
7464 return native_interpret_real (type, ptr, len);
7467 return native_interpret_complex (type, ptr, len);
7470 return native_interpret_vector (type, ptr, len);
7478 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7479 TYPE at compile-time. If we're unable to perform the conversion
7480 return NULL_TREE. */
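/* Illustrative example: on a target with 32-bit int and IEEE
   single-precision float, VIEW_CONVERT_EXPR<int>(1.0f) folds to
   1065353216, i.e. the bit pattern 0x3f800000 reinterpreted as an
   integer.  */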
7483 fold_view_convert_expr (tree type, tree expr)
7485 /* We support up to 512-bit values (for V8DFmode). */
7486 unsigned char buffer[64];
7489 /* Check that the host and target are sane. */
7490 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7493 len = native_encode_expr (expr, buffer, sizeof (buffer));
7497 return native_interpret_expr (type, buffer, len);
7500 /* Build an expression for the address of T. Folds away INDIRECT_REF
7501 to avoid confusing the gimplify process. */
7504 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7506 /* The size of the object is not relevant when talking about its address. */
7507 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7508 t = TREE_OPERAND (t, 0);
7510 if (TREE_CODE (t) == INDIRECT_REF)
7512 t = TREE_OPERAND (t, 0);
7514 if (TREE_TYPE (t) != ptrtype)
7515 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7517 else if (TREE_CODE (t) == MEM_REF
7518 && integer_zerop (TREE_OPERAND (t, 1)))
7519 return TREE_OPERAND (t, 0);
7520 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7522 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7524 if (TREE_TYPE (t) != ptrtype)
7525 t = fold_convert_loc (loc, ptrtype, t);
7528 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7533 /* Build an expression for the address of T. */
7536 build_fold_addr_expr_loc (location_t loc, tree t)
7538 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7540 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7543 /* Fold a unary expression of code CODE and type TYPE with operand
7544 OP0. Return the folded expression if folding is successful.
7545 Otherwise, return NULL_TREE. */
7548 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7552 enum tree_code_class kind = TREE_CODE_CLASS (code);
7554 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7555 && TREE_CODE_LENGTH (code) == 1);
7560 if (CONVERT_EXPR_CODE_P (code)
7561 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7563 /* Don't use STRIP_NOPS, because the signedness of the argument type matters. */
7565 STRIP_SIGN_NOPS (arg0);
7569 /* Strip any conversions that don't change the mode. This
7570 is safe for every expression, except for a comparison
7571 expression because its signedness is derived from its operand.
7574 Note that this is done as an internal manipulation within
7575 the constant folder, in order to find the simplest
7576 representation of the arguments so that their form can be
7577 studied. In any case, the appropriate type conversions
7578 should be put back in the tree that will get out of the constant folder. */
7584 if (TREE_CODE_CLASS (code) == tcc_unary)
7586 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7587 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7588 fold_build1_loc (loc, code, type,
7589 fold_convert_loc (loc, TREE_TYPE (op0),
7590 TREE_OPERAND (arg0, 1))));
7591 else if (TREE_CODE (arg0) == COND_EXPR)
7593 tree arg01 = TREE_OPERAND (arg0, 1);
7594 tree arg02 = TREE_OPERAND (arg0, 2);
7595 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7596 arg01 = fold_build1_loc (loc, code, type,
7597 fold_convert_loc (loc,
7598 TREE_TYPE (op0), arg01));
7599 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7600 arg02 = fold_build1_loc (loc, code, type,
7601 fold_convert_loc (loc,
7602 TREE_TYPE (op0), arg02));
7603 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7606 /* If this was a conversion, and all we did was to move it
7607 inside the COND_EXPR, bring it back out. But leave it if
7608 it is a conversion from integer to integer and the
7609 result precision is no wider than a word since such a
7610 conversion is cheap and may be optimized away by combine,
7611 while it couldn't if it were outside the COND_EXPR. Then return
7612 so we don't get into an infinite recursion loop taking the
7613 conversion out and then back in. */
7615 if ((CONVERT_EXPR_CODE_P (code)
7616 || code == NON_LVALUE_EXPR)
7617 && TREE_CODE (tem) == COND_EXPR
7618 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7619 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7620 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7621 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7622 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7623 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7624 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7626 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7627 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7628 || flag_syntax_only))
7629 tem = build1_loc (loc, code, type,
7631 TREE_TYPE (TREE_OPERAND
7632 (TREE_OPERAND (tem, 1), 0)),
7633 TREE_OPERAND (tem, 0),
7634 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7635 TREE_OPERAND (TREE_OPERAND (tem, 2),
7644 /* Re-association barriers around constants and other re-association
7645 barriers can be removed. */
7646 if (CONSTANT_CLASS_P (op0)
7647 || TREE_CODE (op0) == PAREN_EXPR)
7648 return fold_convert_loc (loc, type, op0);
7653 case FIX_TRUNC_EXPR:
7654 if (TREE_TYPE (op0) == type)
7657 if (COMPARISON_CLASS_P (op0))
7659 /* If we have (type) (a CMP b) and type is an integral type, return a
7660 new expression involving the new type. Canonicalize
7661 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for non-integral types.
7663 Do not fold the result, as that would not simplify further; folding
7664 again results in recursion. */
7665 if (TREE_CODE (type) == BOOLEAN_TYPE)
7666 return build2_loc (loc, TREE_CODE (op0), type,
7667 TREE_OPERAND (op0, 0),
7668 TREE_OPERAND (op0, 1));
7669 else if (!INTEGRAL_TYPE_P (type))
7670 return build3_loc (loc, COND_EXPR, type, op0,
7671 fold_convert (type, boolean_true_node),
7672 fold_convert (type, boolean_false_node));
7675 /* Handle cases of two conversions in a row. */
7676 if (CONVERT_EXPR_P (op0))
7678 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7679 tree inter_type = TREE_TYPE (op0);
7680 int inside_int = INTEGRAL_TYPE_P (inside_type);
7681 int inside_ptr = POINTER_TYPE_P (inside_type);
7682 int inside_float = FLOAT_TYPE_P (inside_type);
7683 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7684 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7685 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7686 int inter_int = INTEGRAL_TYPE_P (inter_type);
7687 int inter_ptr = POINTER_TYPE_P (inter_type);
7688 int inter_float = FLOAT_TYPE_P (inter_type);
7689 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7690 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7691 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7692 int final_int = INTEGRAL_TYPE_P (type);
7693 int final_ptr = POINTER_TYPE_P (type);
7694 int final_float = FLOAT_TYPE_P (type);
7695 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7696 unsigned int final_prec = TYPE_PRECISION (type);
7697 int final_unsignedp = TYPE_UNSIGNED (type);
7699 /* In addition to the cases of two conversions in a row
7700 handled below, if we are converting something to its own
7701 type via an object of identical or wider precision, neither
7702 conversion is needed. */
7703 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7704 && (((inter_int || inter_ptr) && final_int)
7705 || (inter_float && final_float))
7706 && inter_prec >= final_prec)
7707 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7709 /* Likewise, if the intermediate and initial types are either both
7710 float or both integer, we don't need the middle conversion if the
7711 former is wider than the latter and doesn't change the signedness
7712 (for integers). Avoid this if the final type is a pointer since
7713 then we sometimes need the middle conversion. Likewise if the
7714 final type has a precision not equal to the size of its mode. */
7715 if (((inter_int && inside_int)
7716 || (inter_float && inside_float)
7717 || (inter_vec && inside_vec))
7718 && inter_prec >= inside_prec
7719 && (inter_float || inter_vec
7720 || inter_unsignedp == inside_unsignedp)
7721 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7722 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7724 && (! final_vec || inter_prec == inside_prec))
7725 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7727 /* If we have a sign-extension of a zero-extended value, we can
7728 replace that by a single zero-extension. */
7729 if (inside_int && inter_int && final_int
7730 && inside_prec < inter_prec && inter_prec < final_prec
7731 && inside_unsignedp && !inter_unsignedp)
7732 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
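/* Illustrative example: if u has type unsigned short, (long)(int) u can be
   done as a single zero-extension of u to long; the inner cast
   zero-extends, and sign-extending that nonnegative int value cannot set
   any of the new high bits.  */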
7734 /* Two conversions in a row are not needed unless:
7735 - some conversion is floating-point (overstrict for now), or
7736 - some conversion is a vector (overstrict for now), or
7737 - the intermediate type is narrower than both the initial and final types, or
7739 - the intermediate type and innermost type differ in signedness,
7740 and the outermost type is wider than the intermediate, or
7741 - the initial type is a pointer type and the precisions of the
7742 intermediate and final types differ, or
7743 - the final type is a pointer type and the precisions of the
7744 initial and intermediate types differ. */
7745 if (! inside_float && ! inter_float && ! final_float
7746 && ! inside_vec && ! inter_vec && ! final_vec
7747 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7748 && ! (inside_int && inter_int
7749 && inter_unsignedp != inside_unsignedp
7750 && inter_prec < final_prec)
7751 && ((inter_unsignedp && inter_prec > inside_prec)
7752 == (final_unsignedp && final_prec > inter_prec))
7753 && ! (inside_ptr && inter_prec != final_prec)
7754 && ! (final_ptr && inside_prec != inter_prec)
7755 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7756 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7757 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7760 /* Handle (T *)&A.B.C for A being of type T and B and C
7761 living at offset zero. This occurs frequently in
7762 C++ upcasting and then accessing the base. */
7763 if (TREE_CODE (op0) == ADDR_EXPR
7764 && POINTER_TYPE_P (type)
7765 && handled_component_p (TREE_OPERAND (op0, 0)))
7767 HOST_WIDE_INT bitsize, bitpos;
7769 enum machine_mode mode;
7770 int unsignedp, volatilep;
7771 tree base = TREE_OPERAND (op0, 0);
7772 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7773 &mode, &unsignedp, &volatilep, false);
7774 /* If the reference was to a (constant) zero offset, we can use
7775 the address of the base if it has the same base type
7776 as the result type and the pointer type is unqualified. */
7777 if (! offset && bitpos == 0
7778 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7779 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7780 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7781 return fold_convert_loc (loc, type,
7782 build_fold_addr_expr_loc (loc, base));
7785 if (TREE_CODE (op0) == MODIFY_EXPR
7786 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7787 /* Detect assigning a bitfield. */
7788 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7790 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7792 /* Don't leave an assignment inside a conversion
7793 unless assigning a bitfield. */
7794 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7795 /* First do the assignment, then return the converted constant. */
7796 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7797 TREE_NO_WARNING (tem) = 1;
7798 TREE_USED (tem) = 1;
7802 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7803 constant (if x has signed type, the sign bit cannot be set
7804 in c). This folds extension into the BIT_AND_EXPR.
7805 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7806 very likely don't have maximal range for their precision and this
7807 transformation effectively doesn't preserve non-maximal ranges. */
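/* Illustrative example: if x has type signed char, (unsigned int)(x & 0x7f)
   can become (unsigned int) x & 0x7f, because 0x7f leaves the sign bit of
   x's type clear and so the extension cannot affect the masked bits.  */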
7808 if (TREE_CODE (type) == INTEGER_TYPE
7809 && TREE_CODE (op0) == BIT_AND_EXPR
7810 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7812 tree and_expr = op0;
7813 tree and0 = TREE_OPERAND (and_expr, 0);
7814 tree and1 = TREE_OPERAND (and_expr, 1);
7817 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7818 || (TYPE_PRECISION (type)
7819 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7821 else if (TYPE_PRECISION (TREE_TYPE (and1))
7822 <= HOST_BITS_PER_WIDE_INT
7823 && host_integerp (and1, 1))
7825 unsigned HOST_WIDE_INT cst;
7827 cst = tree_low_cst (and1, 1);
7828 cst &= (HOST_WIDE_INT) -1
7829 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7830 change = (cst == 0);
7831 #ifdef LOAD_EXTEND_OP
7833 && !flag_syntax_only
7834 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7837 tree uns = unsigned_type_for (TREE_TYPE (and0));
7838 and0 = fold_convert_loc (loc, uns, and0);
7839 and1 = fold_convert_loc (loc, uns, and1);
7845 tem = force_fit_type_double (type, tree_to_double_int (and1),
7846 0, TREE_OVERFLOW (and1));
7847 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7848 fold_convert_loc (loc, type, and0), tem);
7852 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7853 when one of the new casts will fold away. Conservatively we assume
7854 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7855 if (POINTER_TYPE_P (type)
7856 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7857 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7858 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7859 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7861 tree arg00 = TREE_OPERAND (arg0, 0);
7862 tree arg01 = TREE_OPERAND (arg0, 1);
7864 return fold_build_pointer_plus_loc
7865 (loc, fold_convert_loc (loc, type, arg00), arg01);
7868 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7869 of the same precision, and X has an integer type not narrower than
7870 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7871 if (INTEGRAL_TYPE_P (type)
7872 && TREE_CODE (op0) == BIT_NOT_EXPR
7873 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7874 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7875 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7877 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7878 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7879 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7880 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7881 fold_convert_loc (loc, type, tem));
7884 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7885 type of X and Y (integer types only). */
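/* Illustrative example: if x and y have type int, (unsigned char)(x * y)
   can be computed in the narrower wrapping type, e.g. as
   (unsigned char) x * (unsigned char) y, since only the low 8 bits of the
   product matter.  */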
7886 if (INTEGRAL_TYPE_P (type)
7887 && TREE_CODE (op0) == MULT_EXPR
7888 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7889 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7891 /* Be careful not to introduce new overflows. */
7893 if (TYPE_OVERFLOW_WRAPS (type))
7896 mult_type = unsigned_type_for (type);
7898 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7900 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7901 fold_convert_loc (loc, mult_type,
7902 TREE_OPERAND (op0, 0)),
7903 fold_convert_loc (loc, mult_type,
7904 TREE_OPERAND (op0, 1)));
7905 return fold_convert_loc (loc, type, tem);
7909 tem = fold_convert_const (code, type, op0);
7910 return tem ? tem : NULL_TREE;
7912 case ADDR_SPACE_CONVERT_EXPR:
7913 if (integer_zerop (arg0))
7914 return fold_convert_const (code, type, arg0);
7917 case FIXED_CONVERT_EXPR:
7918 tem = fold_convert_const (code, type, arg0);
7919 return tem ? tem : NULL_TREE;
7921 case VIEW_CONVERT_EXPR:
7922 if (TREE_TYPE (op0) == type)
7924 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7925 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7926 type, TREE_OPERAND (op0, 0));
7927 if (TREE_CODE (op0) == MEM_REF)
7928 return fold_build2_loc (loc, MEM_REF, type,
7929 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7931 /* For integral conversions with the same precision or pointer
7932 conversions use a NOP_EXPR instead. */
7933 if ((INTEGRAL_TYPE_P (type)
7934 || POINTER_TYPE_P (type))
7935 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7936 || POINTER_TYPE_P (TREE_TYPE (op0)))
7937 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7938 return fold_convert_loc (loc, type, op0);
7940 /* Strip inner integral conversions that do not change the precision. */
7941 if (CONVERT_EXPR_P (op0)
7942 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7943 || POINTER_TYPE_P (TREE_TYPE (op0)))
7944 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7945 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7946 && (TYPE_PRECISION (TREE_TYPE (op0))
7947 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7948 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7949 type, TREE_OPERAND (op0, 0));
7951 return fold_view_convert_expr (type, op0);
7954 tem = fold_negate_expr (loc, arg0);
7956 return fold_convert_loc (loc, type, tem);
7960 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7961 return fold_abs_const (arg0, type);
7962 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7963 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7964 /* Convert fabs((double)float) into (double)fabsf(float). */
7965 else if (TREE_CODE (arg0) == NOP_EXPR
7966 && TREE_CODE (type) == REAL_TYPE)
7968 tree targ0 = strip_float_extensions (arg0);
7970 return fold_convert_loc (loc, type,
7971 fold_build1_loc (loc, ABS_EXPR,
7975 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7976 else if (TREE_CODE (arg0) == ABS_EXPR)
7978 else if (tree_expr_nonnegative_p (arg0))
7981 /* Strip sign ops from argument. */
7982 if (TREE_CODE (type) == REAL_TYPE)
7984 tem = fold_strip_sign_ops (arg0);
7986 return fold_build1_loc (loc, ABS_EXPR, type,
7987 fold_convert_loc (loc, type, tem));
7992 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7993 return fold_convert_loc (loc, type, arg0);
7994 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7996 tree itype = TREE_TYPE (type);
7997 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
7998 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
7999 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8000 negate_expr (ipart));
8002 if (TREE_CODE (arg0) == COMPLEX_CST)
8004 tree itype = TREE_TYPE (type);
8005 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8006 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8007 return build_complex (type, rpart, negate_expr (ipart));
8009 if (TREE_CODE (arg0) == CONJ_EXPR)
8010 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8014 if (TREE_CODE (arg0) == INTEGER_CST)
8015 return fold_not_const (arg0, type);
8016 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8017 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8018 /* Convert ~ (-A) to A - 1. */
8019 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8020 return fold_build2_loc (loc, MINUS_EXPR, type,
8021 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8022 build_int_cst (type, 1));
8023 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8024 else if (INTEGRAL_TYPE_P (type)
8025 && ((TREE_CODE (arg0) == MINUS_EXPR
8026 && integer_onep (TREE_OPERAND (arg0, 1)))
8027 || (TREE_CODE (arg0) == PLUS_EXPR
8028 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8029 return fold_build1_loc (loc, NEGATE_EXPR, type,
8030 fold_convert_loc (loc, type,
8031 TREE_OPERAND (arg0, 0)));
8032 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8033 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8034 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8035 fold_convert_loc (loc, type,
8036 TREE_OPERAND (arg0, 0)))))
8037 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8038 fold_convert_loc (loc, type,
8039 TREE_OPERAND (arg0, 1)));
8040 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8041 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8042 fold_convert_loc (loc, type,
8043 TREE_OPERAND (arg0, 1)))))
8044 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8045 fold_convert_loc (loc, type,
8046 TREE_OPERAND (arg0, 0)), tem);
8047 /* Perform BIT_NOT_EXPR on each element individually. */
8048 else if (TREE_CODE (arg0) == VECTOR_CST)
8050 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8051 int count = TYPE_VECTOR_SUBPARTS (type), i;
8053 for (i = 0; i < count; i++)
8057 elem = TREE_VALUE (elements);
8058 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8059 if (elem == NULL_TREE)
8061 elements = TREE_CHAIN (elements);
8064 elem = build_int_cst (TREE_TYPE (type), -1);
8065 list = tree_cons (NULL_TREE, elem, list);
8068 return build_vector (type, nreverse (list));
8073 case TRUTH_NOT_EXPR:
8074 /* The argument to invert_truthvalue must have Boolean type. */
8075 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8076 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8078 /* Note that the operand of this must be an int
8079 and its values must be 0 or 1.
8080 ("true" is a fixed value perhaps depending on the language,
8081 but we don't handle values other than 1 correctly yet.) */
8082 tem = fold_truth_not_expr (loc, arg0);
8085 return fold_convert_loc (loc, type, tem);
8088 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8089 return fold_convert_loc (loc, type, arg0);
8090 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8091 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8092 TREE_OPERAND (arg0, 1));
8093 if (TREE_CODE (arg0) == COMPLEX_CST)
8094 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8095 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8097 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8098 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8099 fold_build1_loc (loc, REALPART_EXPR, itype,
8100 TREE_OPERAND (arg0, 0)),
8101 fold_build1_loc (loc, REALPART_EXPR, itype,
8102 TREE_OPERAND (arg0, 1)));
8103 return fold_convert_loc (loc, type, tem);
8105 if (TREE_CODE (arg0) == CONJ_EXPR)
8107 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8108 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8109 TREE_OPERAND (arg0, 0));
8110 return fold_convert_loc (loc, type, tem);
8112 if (TREE_CODE (arg0) == CALL_EXPR)
8114 tree fn = get_callee_fndecl (arg0);
8115 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8116 switch (DECL_FUNCTION_CODE (fn))
8118 CASE_FLT_FN (BUILT_IN_CEXPI):
8119 fn = mathfn_built_in (type, BUILT_IN_COS);
8121 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8131 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8132 return build_zero_cst (type);
8133 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8134 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8135 TREE_OPERAND (arg0, 0));
8136 if (TREE_CODE (arg0) == COMPLEX_CST)
8137 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8138 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8140 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8141 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8142 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8143 TREE_OPERAND (arg0, 0)),
8144 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8145 TREE_OPERAND (arg0, 1)));
8146 return fold_convert_loc (loc, type, tem);
8148 if (TREE_CODE (arg0) == CONJ_EXPR)
8150 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8151 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8152 return fold_convert_loc (loc, type, negate_expr (tem));
8154 if (TREE_CODE (arg0) == CALL_EXPR)
8156 tree fn = get_callee_fndecl (arg0);
8157 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8158 switch (DECL_FUNCTION_CODE (fn))
8160 CASE_FLT_FN (BUILT_IN_CEXPI):
8161 fn = mathfn_built_in (type, BUILT_IN_SIN);
8163 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8173 /* Fold *&X to X if X is an lvalue. */
8174 if (TREE_CODE (op0) == ADDR_EXPR)
8176 tree op00 = TREE_OPERAND (op0, 0);
8177 if ((TREE_CODE (op00) == VAR_DECL
8178 || TREE_CODE (op00) == PARM_DECL
8179 || TREE_CODE (op00) == RESULT_DECL)
8180 && !TREE_READONLY (op00))
8187 } /* switch (code) */
8191 /* If the operation was a conversion do _not_ mark a resulting constant
8192 with TREE_OVERFLOW if the original constant was not. These conversions
8193 have implementation defined behavior and retaining the TREE_OVERFLOW
8194 flag here would confuse later passes such as VRP. */
8196 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8197 tree type, tree op0)
8199 tree res = fold_unary_loc (loc, code, type, op0);
8201 && TREE_CODE (res) == INTEGER_CST
8202 && TREE_CODE (op0) == INTEGER_CST
8203 && CONVERT_EXPR_CODE_P (code))
8204 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
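/* For instance (purely illustrative), folding the conversion
   (signed char) 200 gives -56 on the usual two's-complement targets.
   The result is implementation-defined rather than an overflow in the
   language sense, so the helper above copies the operand's
   TREE_OVERFLOW flag instead of letting the new constant be marked
   overflowed, which would otherwise mislead passes such as VRP.  */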
8209 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8210 operands OP0 and OP1. LOC is the location of the resulting expression.
8211 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8212 Return the folded expression if folding is successful. Otherwise,
8213 return NULL_TREE. */
8215 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8216 tree arg0, tree arg1, tree op0, tree op1)
8220 /* We only do these simplifications if we are optimizing. */
8224 /* Check for things like (A || B) && (A || C). We can convert this
8225 to A || (B && C). Note that either operator can be any of the four
8226 truth and/or operations and the transformation will still be
8227 valid. Also note that we only care about order for the
8228 ANDIF and ORIF operators. If B contains side effects, this
8229 might change the truth-value of A. */
8230 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8231 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8232 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8233 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8234 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8235 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8237 tree a00 = TREE_OPERAND (arg0, 0);
8238 tree a01 = TREE_OPERAND (arg0, 1);
8239 tree a10 = TREE_OPERAND (arg1, 0);
8240 tree a11 = TREE_OPERAND (arg1, 1);
8241 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8242 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8243 && (code == TRUTH_AND_EXPR
8244 || code == TRUTH_OR_EXPR));
8246 if (operand_equal_p (a00, a10, 0))
8247 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8248 fold_build2_loc (loc, code, type, a01, a11));
8249 else if (commutative && operand_equal_p (a00, a11, 0))
8250 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8251 fold_build2_loc (loc, code, type, a01, a10));
8252 else if (commutative && operand_equal_p (a01, a10, 0))
8253 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8254 fold_build2_loc (loc, code, type, a00, a11));
8256 /* This case is tricky because we must either have commutative
8257 operators or else A10 must not have side-effects. */
8259 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8260 && operand_equal_p (a01, a11, 0))
8261 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8262 fold_build2_loc (loc, code, type, a00, a10),
8266 /* See if we can build a range comparison. */
8267 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8270 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8271 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8273 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8275 return fold_build2_loc (loc, code, type, tem, arg1);
8278 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8279 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8281 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8283 return fold_build2_loc (loc, code, type, arg0, tem);
8286 /* Check for the possibility of merging component references. If our
8287 lhs is another similar operation, try to merge its rhs with our
8288 rhs. Then try to merge our lhs and rhs. */
8289 if (TREE_CODE (arg0) == code
8290 && 0 != (tem = fold_truthop (loc, code, type,
8291 TREE_OPERAND (arg0, 1), arg1)))
8292 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8294 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
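/* A minimal stand-alone sketch (not GCC internals) of the main
   distribution fold_truth_andor performs: provided B and C carry no
   side effects that could change A, (A || B) && (A || C) has the same
   truth value as A || (B && C).  The function name below is made up
   purely for illustration.  */
static int
example_truth_distribute (int a, int b, int c)
{
  /* The folder rewrites the left-hand form into this right-hand one,
     so the common operand A is tested only once.  */
  return a || (b && c);		/* == (a || b) && (a || c) */
}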
8300 /* Fold a binary expression of code CODE and type TYPE with operands
8301 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8302 Return the folded expression if folding is successful. Otherwise,
8303 return NULL_TREE. */
8306 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8308 enum tree_code compl_code;
8310 if (code == MIN_EXPR)
8311 compl_code = MAX_EXPR;
8312 else if (code == MAX_EXPR)
8313 compl_code = MIN_EXPR;
8317 /* MIN (MAX (a, b), b) == b. */
8318 if (TREE_CODE (op0) == compl_code
8319 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8320 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8322 /* MIN (MAX (b, a), b) == b. */
8323 if (TREE_CODE (op0) == compl_code
8324 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8325 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8326 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8328 /* MIN (a, MAX (a, b)) == a. */
8329 if (TREE_CODE (op1) == compl_code
8330 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8331 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8332 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8334 /* MIN (a, MAX (b, a)) == a. */
8335 if (TREE_CODE (op1) == compl_code
8336 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8337 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8338 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
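/* A stand-alone source-level sketch (not GCC internals) of the first
   identity handled by fold_minmax above.  The function name is
   hypothetical and exists only to show the shape of the fold.  */
static int
example_min_of_max (int a, int b)
{
  int t = a > b ? a : b;	/* MAX (a, b) */
  /* MIN (MAX (a, b), b) is always just b, which is what the folder
     returns while preserving any side effects of a.  */
  return t < b ? t : b;
}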
8343 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8344 by changing CODE to reduce the magnitude of constants involved in
8345 ARG0 of the comparison.
8346 Returns a canonicalized comparison tree if a simplification was
8347 possible, otherwise returns NULL_TREE.
8348 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8349 valid if signed overflow is undefined. */
8352 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8353 tree arg0, tree arg1,
8354 bool *strict_overflow_p)
8356 enum tree_code code0 = TREE_CODE (arg0);
8357 tree t, cst0 = NULL_TREE;
8361 /* Match A +- CST code arg1 and CST code arg1. We can change the
8362 first form only if overflow is undefined. */
8363 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8364 /* In principle pointers also have undefined overflow behavior,
8365 but that causes problems elsewhere. */
8366 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8367 && (code0 == MINUS_EXPR
8368 || code0 == PLUS_EXPR)
8369 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8370 || code0 == INTEGER_CST))
8373 /* Identify the constant in arg0 and its sign. */
8374 if (code0 == INTEGER_CST)
8377 cst0 = TREE_OPERAND (arg0, 1);
8378 sgn0 = tree_int_cst_sgn (cst0);
8380 /* Overflowed constants and zero will cause problems. */
8381 if (integer_zerop (cst0)
8382 || TREE_OVERFLOW (cst0))
8385 /* See if we can reduce the magnitude of the constant in
8386 arg0 by changing the comparison code. */
8387 if (code0 == INTEGER_CST)
8389 /* CST <= arg1 -> CST-1 < arg1. */
8390 if (code == LE_EXPR && sgn0 == 1)
8392 /* -CST < arg1 -> -CST-1 <= arg1. */
8393 else if (code == LT_EXPR && sgn0 == -1)
8395 /* CST > arg1 -> CST-1 >= arg1. */
8396 else if (code == GT_EXPR && sgn0 == 1)
8398 /* -CST >= arg1 -> -CST-1 > arg1. */
8399 else if (code == GE_EXPR && sgn0 == -1)
8403 /* arg1 code' CST' might be more canonical. */
8408 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8410 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8412 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8413 else if (code == GT_EXPR
8414 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8416 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8417 else if (code == LE_EXPR
8418 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8420 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8421 else if (code == GE_EXPR
8422 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8426 *strict_overflow_p = true;
8429 /* Now build the constant reduced in magnitude. But not if that
8430 would produce one outside of its type's range. */
8431 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8433 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8434 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8436 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8437 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8438 /* We cannot swap the comparison here as that would cause us to
8439 endlessly recurse. */
8442 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8443 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8444 if (code0 != INTEGER_CST)
8445 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8446 t = fold_convert (TREE_TYPE (arg1), t);
8448 /* If swapping might yield a more canonical form, do so. */
8450 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8452 return fold_build2_loc (loc, code, type, t, arg1);
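/* A source-level sketch of the canonicalization above, assuming signed
   overflow is undefined so the rewrite is valid: the magnitude of the
   constant shrinks by one while the comparison code is adjusted, e.g.
     x - 8 < y   becomes   x - 7 <= y
   The function below merely spells out the canonical form; its name is
   made up for illustration.  */
static int
example_canonical_compare (int x, int y)
{
  return x - 7 <= y;		/* canonical form of x - 8 < y */
}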
8455 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8456 overflow further. Try to decrease the magnitude of constants involved
8457 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8458 and put sole constants at the second argument position.
8459 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8462 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8463 tree arg0, tree arg1)
8466 bool strict_overflow_p;
8467 const char * const warnmsg = G_("assuming signed overflow does not occur "
8468 "when reducing constant in comparison");
8470 /* Try canonicalization by simplifying arg0. */
8471 strict_overflow_p = false;
8472 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8473 &strict_overflow_p);
8476 if (strict_overflow_p)
8477 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8481 /* Try canonicalization by simplifying arg1 using the swapped comparison. */
8483 code = swap_tree_comparison (code);
8484 strict_overflow_p = false;
8485 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8486 &strict_overflow_p);
8487 if (t && strict_overflow_p)
8488 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8492 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8493 space. This is used to avoid issuing overflow warnings for
8494 expressions like &p->x which cannot wrap. */
8497 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8499 unsigned HOST_WIDE_INT offset_low, total_low;
8500 HOST_WIDE_INT size, offset_high, total_high;
8502 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8508 if (offset == NULL_TREE)
8513 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8517 offset_low = TREE_INT_CST_LOW (offset);
8518 offset_high = TREE_INT_CST_HIGH (offset);
8521 if (add_double_with_sign (offset_low, offset_high,
8522 bitpos / BITS_PER_UNIT, 0,
8523 &total_low, &total_high,
8527 if (total_high != 0)
8530 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8534 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an array. */
8536 if (TREE_CODE (base) == ADDR_EXPR)
8538 HOST_WIDE_INT base_size;
8540 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8541 if (base_size > 0 && size < base_size)
8545 return total_low > (unsigned HOST_WIDE_INT) size;
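/* A stand-alone sketch (not the routine above) of the underlying test:
   a constant byte offset into an object cannot wrap the address space
   as long as it stays within the object's known size.  The names and
   types are simplified for illustration.  */
static int
example_offset_may_wrap (unsigned long object_size, unsigned long byte_offset)
{
  /* Mirrors the final comparison in pointer_may_wrap_p: only an offset
     past the end of the object could possibly wrap.  */
  return byte_offset > object_size;
}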
8548 /* Subroutine of fold_binary. This routine performs all of the
8549 transformations that are common to the equality/inequality
8550 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8551 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8552 fold_binary should call fold_binary. Fold a comparison with
8553 tree code CODE and type TYPE with operands OP0 and OP1. Return
8554 the folded comparison or NULL_TREE. */
8557 fold_comparison (location_t loc, enum tree_code code, tree type,
8560 tree arg0, arg1, tem;
8565 STRIP_SIGN_NOPS (arg0);
8566 STRIP_SIGN_NOPS (arg1);
8568 tem = fold_relational_const (code, type, arg0, arg1);
8569 if (tem != NULL_TREE)
8572 /* If one arg is a real or integer constant, put it last. */
8573 if (tree_swap_operands_p (arg0, arg1, true))
8574 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8576 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8577 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8578 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8579 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8580 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8581 && (TREE_CODE (arg1) == INTEGER_CST
8582 && !TREE_OVERFLOW (arg1)))
8584 tree const1 = TREE_OPERAND (arg0, 1);
8586 tree variable = TREE_OPERAND (arg0, 0);
8589 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8591 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8592 TREE_TYPE (arg1), const2, const1);
8594 /* If the constant operation overflowed this can be
8595 simplified as a comparison against INT_MAX/INT_MIN. */
8596 if (TREE_CODE (lhs) == INTEGER_CST
8597 && TREE_OVERFLOW (lhs))
8599 int const1_sgn = tree_int_cst_sgn (const1);
8600 enum tree_code code2 = code;
8602 /* Get the sign of the constant on the lhs if the
8603 operation were VARIABLE + CONST1. */
8604 if (TREE_CODE (arg0) == MINUS_EXPR)
8605 const1_sgn = -const1_sgn;
8607 /* The sign of the constant determines if we overflowed
8608 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8609 Canonicalize to the INT_MIN overflow by swapping the comparison if necessary. */
8611 if (const1_sgn == -1)
8612 code2 = swap_tree_comparison (code);
8614 /* We now can look at the canonicalized case
8615 VARIABLE + 1 CODE2 INT_MIN
8616 and decide on the result. */
8617 if (code2 == LT_EXPR
8619 || code2 == EQ_EXPR)
8620 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8621 else if (code2 == NE_EXPR
8623 || code2 == GT_EXPR)
8624 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8627 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8628 && (TREE_CODE (lhs) != INTEGER_CST
8629 || !TREE_OVERFLOW (lhs)))
8631 if (code != EQ_EXPR && code != NE_EXPR)
8632 fold_overflow_warning ("assuming signed overflow does not occur "
8633 "when changing X +- C1 cmp C2 to "
8635 WARN_STRICT_OVERFLOW_COMPARISON);
8636 return fold_build2_loc (loc, code, type, variable, lhs);
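/* A worked example (illustrative only): for signed x, the comparison
   x + 3 < 10 is rewritten here as x < 7, moving the constant to the
   right-hand side.  The overflow warning above is emitted because the
   rewrite assumes x + 3 never wraps.  */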
8640 /* For comparisons of pointers we can decompose it to a compile time
8641 comparison of the base objects and the offsets into the object.
8642 This requires at least one operand being an ADDR_EXPR or a
8643 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8644 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8645 && (TREE_CODE (arg0) == ADDR_EXPR
8646 || TREE_CODE (arg1) == ADDR_EXPR
8647 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8648 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8650 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8651 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8652 enum machine_mode mode;
8653 int volatilep, unsignedp;
8654 bool indirect_base0 = false, indirect_base1 = false;
8656 /* Get base and offset for the access. Strip ADDR_EXPR for
8657 get_inner_reference, but put it back by stripping INDIRECT_REF
8658 off the base object if possible. indirect_baseN will be true
8659 if baseN is not an address but refers to the object itself. */
8661 if (TREE_CODE (arg0) == ADDR_EXPR)
8663 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8664 &bitsize, &bitpos0, &offset0, &mode,
8665 &unsignedp, &volatilep, false);
8666 if (TREE_CODE (base0) == INDIRECT_REF)
8667 base0 = TREE_OPERAND (base0, 0);
8669 indirect_base0 = true;
8671 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8673 base0 = TREE_OPERAND (arg0, 0);
8674 STRIP_SIGN_NOPS (base0);
8675 if (TREE_CODE (base0) == ADDR_EXPR)
8677 base0 = TREE_OPERAND (base0, 0);
8678 indirect_base0 = true;
8680 offset0 = TREE_OPERAND (arg0, 1);
8684 if (TREE_CODE (arg1) == ADDR_EXPR)
8686 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8687 &bitsize, &bitpos1, &offset1, &mode,
8688 &unsignedp, &volatilep, false);
8689 if (TREE_CODE (base1) == INDIRECT_REF)
8690 base1 = TREE_OPERAND (base1, 0);
8692 indirect_base1 = true;
8694 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8696 base1 = TREE_OPERAND (arg1, 0);
8697 STRIP_SIGN_NOPS (base1);
8698 if (TREE_CODE (base1) == ADDR_EXPR)
8700 base1 = TREE_OPERAND (base1, 0);
8701 indirect_base1 = true;
8703 offset1 = TREE_OPERAND (arg1, 1);
8706 /* A local variable can never be pointed to by
8707 the default SSA name of an incoming parameter. */
8708 if ((TREE_CODE (arg0) == ADDR_EXPR
8710 && TREE_CODE (base0) == VAR_DECL
8711 && auto_var_in_fn_p (base0, current_function_decl)
8713 && TREE_CODE (base1) == SSA_NAME
8714 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8715 && SSA_NAME_IS_DEFAULT_DEF (base1))
8716 || (TREE_CODE (arg1) == ADDR_EXPR
8718 && TREE_CODE (base1) == VAR_DECL
8719 && auto_var_in_fn_p (base1, current_function_decl)
8721 && TREE_CODE (base0) == SSA_NAME
8722 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8723 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8725 if (code == NE_EXPR)
8726 return constant_boolean_node (1, type);
8727 else if (code == EQ_EXPR)
8728 return constant_boolean_node (0, type);
8730 /* If we have equivalent bases we might be able to simplify. */
8731 else if (indirect_base0 == indirect_base1
8732 && operand_equal_p (base0, base1, 0))
8734 /* We can fold this expression to a constant if the non-constant
8735 offset parts are equal. */
8736 if ((offset0 == offset1
8737 || (offset0 && offset1
8738 && operand_equal_p (offset0, offset1, 0)))
8741 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8746 && bitpos0 != bitpos1
8747 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8748 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8749 fold_overflow_warning (("assuming pointer wraparound does not "
8750 "occur when comparing P +- C1 with "
8752 WARN_STRICT_OVERFLOW_CONDITIONAL);
8757 return constant_boolean_node (bitpos0 == bitpos1, type);
8759 return constant_boolean_node (bitpos0 != bitpos1, type);
8761 return constant_boolean_node (bitpos0 < bitpos1, type);
8763 return constant_boolean_node (bitpos0 <= bitpos1, type);
8765 return constant_boolean_node (bitpos0 >= bitpos1, type);
8767 return constant_boolean_node (bitpos0 > bitpos1, type);
8771 /* We can simplify the comparison to a comparison of the variable
8772 offset parts if the constant offset parts are equal.
8773 Be careful to use signed size type here because otherwise we
8774 mess with array offsets in the wrong way. This is possible
8775 because pointer arithmetic is restricted to remain within an
8776 object and overflow on pointer differences is undefined as of
8777 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8778 else if (bitpos0 == bitpos1
8779 && ((code == EQ_EXPR || code == NE_EXPR)
8780 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8782 /* By converting to signed size type we cover middle-end pointer
8783 arithmetic which operates on unsigned pointer types of size
8784 type size and ARRAY_REF offsets which are properly sign or
8785 zero extended from their type in case it is narrower than sizetype. */
8787 if (offset0 == NULL_TREE)
8788 offset0 = build_int_cst (ssizetype, 0);
8790 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8791 if (offset1 == NULL_TREE)
8792 offset1 = build_int_cst (ssizetype, 0);
8794 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8798 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8799 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8800 fold_overflow_warning (("assuming pointer wraparound does not "
8801 "occur when comparing P +- C1 with "
8803 WARN_STRICT_OVERFLOW_COMPARISON);
8805 return fold_build2_loc (loc, code, type, offset0, offset1);
8808 /* For non-equal bases we can simplify if they are addresses
8809 of local binding decls or constants. */
8810 else if (indirect_base0 && indirect_base1
8811 /* We know that !operand_equal_p (base0, base1, 0)
8812 because the if condition was false. But make
8813 sure two decls are not the same. */
8815 && TREE_CODE (arg0) == ADDR_EXPR
8816 && TREE_CODE (arg1) == ADDR_EXPR
8817 && (((TREE_CODE (base0) == VAR_DECL
8818 || TREE_CODE (base0) == PARM_DECL)
8819 && (targetm.binds_local_p (base0)
8820 || CONSTANT_CLASS_P (base1)))
8821 || CONSTANT_CLASS_P (base0))
8822 && (((TREE_CODE (base1) == VAR_DECL
8823 || TREE_CODE (base1) == PARM_DECL)
8824 && (targetm.binds_local_p (base1)
8825 || CONSTANT_CLASS_P (base0)))
8826 || CONSTANT_CLASS_P (base1)))
8828 if (code == EQ_EXPR)
8829 return omit_two_operands_loc (loc, type, boolean_false_node,
8831 else if (code == NE_EXPR)
8832 return omit_two_operands_loc (loc, type, boolean_true_node,
8835 /* For equal offsets we can simplify to a comparison of the base addresses. */
8837 else if (bitpos0 == bitpos1
8839 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8841 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8842 && ((offset0 == offset1)
8843 || (offset0 && offset1
8844 && operand_equal_p (offset0, offset1, 0))))
8847 base0 = build_fold_addr_expr_loc (loc, base0);
8849 base1 = build_fold_addr_expr_loc (loc, base1);
8850 return fold_build2_loc (loc, code, type, base0, base1);
8854 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8855 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8856 the resulting offset is smaller in absolute value than the original one. */
8858 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8859 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8860 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8861 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8862 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8863 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8864 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8866 tree const1 = TREE_OPERAND (arg0, 1);
8867 tree const2 = TREE_OPERAND (arg1, 1);
8868 tree variable1 = TREE_OPERAND (arg0, 0);
8869 tree variable2 = TREE_OPERAND (arg1, 0);
8871 const char * const warnmsg = G_("assuming signed overflow does not "
8872 "occur when combining constants around "
8875 /* Put the constant on the side where it doesn't overflow and is
8876 of lower absolute value than before. */
8877 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8878 ? MINUS_EXPR : PLUS_EXPR,
8880 if (!TREE_OVERFLOW (cst)
8881 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8883 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8884 return fold_build2_loc (loc, code, type,
8886 fold_build2_loc (loc,
8887 TREE_CODE (arg1), TREE_TYPE (arg1),
8891 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8892 ? MINUS_EXPR : PLUS_EXPR,
8894 if (!TREE_OVERFLOW (cst)
8895 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8897 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8898 return fold_build2_loc (loc, code, type,
8899 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8905 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8906 signed arithmetic case. That form is created by the compiler
8907 often enough for folding it to be of value. One example is in
8908 computing loop trip counts after Operator Strength Reduction. */
8909 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8910 && TREE_CODE (arg0) == MULT_EXPR
8911 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8912 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8913 && integer_zerop (arg1))
8915 tree const1 = TREE_OPERAND (arg0, 1);
8916 tree const2 = arg1; /* zero */
8917 tree variable1 = TREE_OPERAND (arg0, 0);
8918 enum tree_code cmp_code = code;
8920 /* Handle unfolded multiplication by zero. */
8921 if (integer_zerop (const1))
8922 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8924 fold_overflow_warning (("assuming signed overflow does not occur when "
8925 "eliminating multiplication in comparison "
8927 WARN_STRICT_OVERFLOW_COMPARISON);
8929 /* If const1 is negative we swap the sense of the comparison. */
8930 if (tree_int_cst_sgn (const1) < 0)
8931 cmp_code = swap_tree_comparison (cmp_code);
8933 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
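/* Worked examples (illustrative only), valid because signed overflow is
   treated as undefined here:
     x * 4 < 0    becomes   x < 0
     x * -4 < 0   becomes   x > 0
   the comparison being swapped when the multiplier is negative.  */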
8936 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8940 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8942 tree targ0 = strip_float_extensions (arg0);
8943 tree targ1 = strip_float_extensions (arg1);
8944 tree newtype = TREE_TYPE (targ0);
8946 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8947 newtype = TREE_TYPE (targ1);
8949 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8950 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8951 return fold_build2_loc (loc, code, type,
8952 fold_convert_loc (loc, newtype, targ0),
8953 fold_convert_loc (loc, newtype, targ1));
8955 /* (-a) CMP (-b) -> b CMP a */
8956 if (TREE_CODE (arg0) == NEGATE_EXPR
8957 && TREE_CODE (arg1) == NEGATE_EXPR)
8958 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8959 TREE_OPERAND (arg0, 0));
8961 if (TREE_CODE (arg1) == REAL_CST)
8963 REAL_VALUE_TYPE cst;
8964 cst = TREE_REAL_CST (arg1);
8966 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8967 if (TREE_CODE (arg0) == NEGATE_EXPR)
8968 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8969 TREE_OPERAND (arg0, 0),
8970 build_real (TREE_TYPE (arg1),
8971 real_value_negate (&cst)));
8973 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8974 /* a CMP (-0) -> a CMP 0 */
8975 if (REAL_VALUE_MINUS_ZERO (cst))
8976 return fold_build2_loc (loc, code, type, arg0,
8977 build_real (TREE_TYPE (arg1), dconst0));
8979 /* x != NaN is always true, other ops are always false. */
8980 if (REAL_VALUE_ISNAN (cst)
8981 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8983 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8984 return omit_one_operand_loc (loc, type, tem, arg0);
8987 /* Fold comparisons against infinity. */
8988 if (REAL_VALUE_ISINF (cst)
8989 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
8991 tem = fold_inf_compare (loc, code, type, arg0, arg1);
8992 if (tem != NULL_TREE)
8997 /* If this is a comparison of a real constant with a PLUS_EXPR
8998 or a MINUS_EXPR of a real constant, we can convert it into a
8999 comparison with a revised real constant as long as no overflow
9000 occurs when unsafe_math_optimizations are enabled. */
9001 if (flag_unsafe_math_optimizations
9002 && TREE_CODE (arg1) == REAL_CST
9003 && (TREE_CODE (arg0) == PLUS_EXPR
9004 || TREE_CODE (arg0) == MINUS_EXPR)
9005 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9006 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9007 ? MINUS_EXPR : PLUS_EXPR,
9008 arg1, TREE_OPERAND (arg0, 1)))
9009 && !TREE_OVERFLOW (tem))
9010 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9012 /* Likewise, we can simplify a comparison of a real constant with
9013 a MINUS_EXPR whose first operand is also a real constant, i.e.
9014 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9015 floating-point types only if -fassociative-math is set. */
9016 if (flag_associative_math
9017 && TREE_CODE (arg1) == REAL_CST
9018 && TREE_CODE (arg0) == MINUS_EXPR
9019 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9020 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9022 && !TREE_OVERFLOW (tem))
9023 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9024 TREE_OPERAND (arg0, 1), tem);
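/* For example (illustrative only), with -fassociative-math the
   comparison (10.0 - x) < 4.0 is folded here to x > 6.0 by combining
   the two constants and swapping the comparison.  */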
9026 /* Fold comparisons against built-in math functions. */
9027 if (TREE_CODE (arg1) == REAL_CST
9028 && flag_unsafe_math_optimizations
9029 && ! flag_errno_math)
9031 enum built_in_function fcode = builtin_mathfn_code (arg0);
9033 if (fcode != END_BUILTINS)
9035 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9036 if (tem != NULL_TREE)
9042 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9043 && CONVERT_EXPR_P (arg0))
9045 /* If we are widening one operand of an integer comparison,
9046 see if the other operand is similarly being widened. Perhaps we
9047 can do the comparison in the narrower type. */
9048 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9052 /* Or if we are changing signedness. */
9053 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9058 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9059 constant, we can simplify it. */
9060 if (TREE_CODE (arg1) == INTEGER_CST
9061 && (TREE_CODE (arg0) == MIN_EXPR
9062 || TREE_CODE (arg0) == MAX_EXPR)
9063 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9065 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9070 /* Simplify comparison of something with itself. (For IEEE
9071 floating-point, we can only do some of these simplifications.) */
9072 if (operand_equal_p (arg0, arg1, 0))
9077 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9078 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9079 return constant_boolean_node (1, type);
9084 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9085 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9086 return constant_boolean_node (1, type);
9087 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9090 /* For NE, we can only do this simplification if integer
9091 or we don't honor IEEE floating point NaNs. */
9092 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9093 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9095 /* ... fall through ... */
9098 return constant_boolean_node (0, type);
9104 /* If we are comparing an expression that just has comparisons
9105 of two integer values, arithmetic expressions of those comparisons,
9106 and constants, we can simplify it. There are only three cases
9107 to check: the two values can either be equal, the first can be
9108 greater, or the second can be greater. Fold the expression for
9109 those three values. Since each value must be 0 or 1, we have
9110 eight possibilities, each of which corresponds to the constant 0
9111 or 1 or one of the six possible comparisons.
9113 This handles common cases like (a > b) == 0 but also handles
9114 expressions like ((x > y) - (y > x)) > 0, which supposedly
9115 occur in macroized code. */
9117 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9119 tree cval1 = 0, cval2 = 0;
9122 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9123 /* Don't handle degenerate cases here; they should already
9124 have been handled anyway. */
9125 && cval1 != 0 && cval2 != 0
9126 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9127 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9128 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9129 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9130 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9131 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9132 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9134 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9135 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9137 /* We can't just pass T to eval_subst in case cval1 or cval2
9138 was the same as ARG1. */
9141 = fold_build2_loc (loc, code, type,
9142 eval_subst (loc, arg0, cval1, maxval,
9146 = fold_build2_loc (loc, code, type,
9147 eval_subst (loc, arg0, cval1, maxval,
9151 = fold_build2_loc (loc, code, type,
9152 eval_subst (loc, arg0, cval1, minval,
9156 /* All three of these results should be 0 or 1. Confirm they are.
9157 Then use those values to select the proper code to use. */
9159 if (TREE_CODE (high_result) == INTEGER_CST
9160 && TREE_CODE (equal_result) == INTEGER_CST
9161 && TREE_CODE (low_result) == INTEGER_CST)
9163 /* Make a 3-bit mask with the high-order bit being the
9164 value for `>', the next for '=', and the low for '<'. */
9165 switch ((integer_onep (high_result) * 4)
9166 + (integer_onep (equal_result) * 2)
9167 + integer_onep (low_result))
9171 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9192 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9197 tem = save_expr (build2 (code, type, cval1, cval2));
9198 SET_EXPR_LOCATION (tem, loc);
9201 return fold_build2_loc (loc, code, type, cval1, cval2);
9206 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9207 into a single range test. */
9208 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9209 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9210 && TREE_CODE (arg1) == INTEGER_CST
9211 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9212 && !integer_zerop (TREE_OPERAND (arg0, 1))
9213 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9214 && !TREE_OVERFLOW (arg1))
9216 tem = fold_div_compare (loc, code, type, arg0, arg1);
9217 if (tem != NULL_TREE)
9221 /* Fold ~X op ~Y as Y op X. */
9222 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9223 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9225 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9226 return fold_build2_loc (loc, code, type,
9227 fold_convert_loc (loc, cmp_type,
9228 TREE_OPERAND (arg1, 0)),
9229 TREE_OPERAND (arg0, 0));
9232 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9233 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9234 && TREE_CODE (arg1) == INTEGER_CST)
9236 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9237 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9238 TREE_OPERAND (arg0, 0),
9239 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9240 fold_convert_loc (loc, cmp_type, arg1)));
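/* A stand-alone sketch (not GCC internals) of the two BIT_NOT_EXPR
   comparison folds above: ~x is strictly decreasing in x, so
     ~x < ~y   holds exactly when   y < x, and
     ~x < C    holds exactly when   x > ~C.
   The function name is made up for illustration.  */
static int
example_not_compare (int x, int y)
{
  /* The folder drops both NOTs and swaps the operands.  */
  return y < x;			/* equivalent to ~x < ~y */
}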
9247 /* Subroutine of fold_binary. Optimize complex multiplications of the
9248 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9249 argument EXPR represents the expression "z" of type TYPE. */
9252 fold_mult_zconjz (location_t loc, tree type, tree expr)
9254 tree itype = TREE_TYPE (type);
9255 tree rpart, ipart, tem;
9257 if (TREE_CODE (expr) == COMPLEX_EXPR)
9259 rpart = TREE_OPERAND (expr, 0);
9260 ipart = TREE_OPERAND (expr, 1);
9262 else if (TREE_CODE (expr) == COMPLEX_CST)
9264 rpart = TREE_REALPART (expr);
9265 ipart = TREE_IMAGPART (expr);
9269 expr = save_expr (expr);
9270 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9271 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9274 rpart = save_expr (rpart);
9275 ipart = save_expr (ipart);
9276 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9277 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9278 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9279 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9280 build_zero_cst (itype));
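/* A stand-alone numeric sketch (not GCC internals) of the identity used
   by fold_mult_zconjz above: for z = a + b*i,
     z * conj (z) = a*a + b*b
   with a zero imaginary part.  The helper name is hypothetical.  */
static double
example_zconjz_realpart (double a, double b)
{
  /* (a + b*i) * (a - b*i) = a*a - (b*i)*(b*i) = a*a + b*b.  */
  return a * a + b * b;
}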
9284 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9285 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9286 guarantees that P and N have the same least significant log2(M) bits.
9287 N is not otherwise constrained. In particular, N is not normalized to
9288 0 <= N < M as is common. In general, the precise value of P is unknown.
9289 M is chosen as large as possible such that constant N can be determined.
9291 Returns M and sets *RESIDUE to N.
9293 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9294 account. This is not always possible due to PR 35705.
9297 static unsigned HOST_WIDE_INT
9298 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9299 bool allow_func_align)
9301 enum tree_code code;
9305 code = TREE_CODE (expr);
9306 if (code == ADDR_EXPR)
9308 unsigned int bitalign;
9309 bitalign = get_object_alignment_1 (TREE_OPERAND (expr, 0), residue);
9310 *residue /= BITS_PER_UNIT;
9311 return bitalign / BITS_PER_UNIT;
9313 else if (code == POINTER_PLUS_EXPR)
9316 unsigned HOST_WIDE_INT modulus;
9317 enum tree_code inner_code;
9319 op0 = TREE_OPERAND (expr, 0);
9321 modulus = get_pointer_modulus_and_residue (op0, residue,
9324 op1 = TREE_OPERAND (expr, 1);
9326 inner_code = TREE_CODE (op1);
9327 if (inner_code == INTEGER_CST)
9329 *residue += TREE_INT_CST_LOW (op1);
9332 else if (inner_code == MULT_EXPR)
9334 op1 = TREE_OPERAND (op1, 1);
9335 if (TREE_CODE (op1) == INTEGER_CST)
9337 unsigned HOST_WIDE_INT align;
9339 /* Compute the greatest power-of-2 divisor of op1. */
9340 align = TREE_INT_CST_LOW (op1);
9343 /* If align is non-zero and less than *modulus, replace
9344 *modulus with align. If align is 0, then either op1 is 0
9345 or the greatest power-of-2 divisor of op1 doesn't fit in an
9346 unsigned HOST_WIDE_INT. In either case, no additional
9347 constraint is imposed. */
9349 modulus = MIN (modulus, align);
9356 /* If we get here, we were unable to determine anything useful about the expression. */
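/* A stand-alone sketch (not GCC internals) of the bit trick used by the
   MULT_EXPR case above: in two's complement, v & -v isolates the lowest
   set bit of v, which is exactly the greatest power-of-two divisor of v
   (and 0 when v is 0).  The function name is made up.  */
static unsigned long
example_greatest_pow2_divisor (unsigned long v)
{
  return v & -v;
}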
9362 /* Fold a binary expression of code CODE and type TYPE with operands
9363 OP0 and OP1. LOC is the location of the resulting expression.
9364 Return the folded expression if folding is successful. Otherwise,
9365 return NULL_TREE. */
9368 fold_binary_loc (location_t loc,
9369 enum tree_code code, tree type, tree op0, tree op1)
9371 enum tree_code_class kind = TREE_CODE_CLASS (code);
9372 tree arg0, arg1, tem;
9373 tree t1 = NULL_TREE;
9374 bool strict_overflow_p;
9376 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9377 && TREE_CODE_LENGTH (code) == 2
9379 && op1 != NULL_TREE);
9384 /* Strip any conversions that don't change the mode. This is
9385 safe for every expression, except for a comparison expression
9386 because its signedness is derived from its operands. So, in
9387 the latter case, only strip conversions that don't change the
9388 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments to be preserved.
9391 Note that this is done as an internal manipulation within the
9392 constant folder, in order to find the simplest representation
9393 of the arguments so that their form can be studied. In any
9394 case, the appropriate type conversions should be put back in
9395 the tree that will get out of the constant folder. */
9397 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9399 STRIP_SIGN_NOPS (arg0);
9400 STRIP_SIGN_NOPS (arg1);
9408 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9409 constant but we can't do arithmetic on them. */
9410 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9411 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9412 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9413 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9414 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9415 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9417 if (kind == tcc_binary)
9419 /* Make sure type and arg0 have the same saturating flag. */
9420 gcc_assert (TYPE_SATURATING (type)
9421 == TYPE_SATURATING (TREE_TYPE (arg0)));
9422 tem = const_binop (code, arg0, arg1);
9424 else if (kind == tcc_comparison)
9425 tem = fold_relational_const (code, type, arg0, arg1);
9429 if (tem != NULL_TREE)
9431 if (TREE_TYPE (tem) != type)
9432 tem = fold_convert_loc (loc, type, tem);
9437 /* If this is a commutative operation, and ARG0 is a constant, move it
9438 to ARG1 to reduce the number of tests below. */
9439 if (commutative_tree_code (code)
9440 && tree_swap_operands_p (arg0, arg1, true))
9441 return fold_build2_loc (loc, code, type, op1, op0);
9443 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9445 First check for cases where an arithmetic operation is applied to a
9446 compound, conditional, or comparison operation. Push the arithmetic
9447 operation inside the compound or conditional to see if any folding
9448 can then be done. Convert comparison to conditional for this purpose.
9449 This also optimizes non-constant cases that used to be done in expand_expr.
9452 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9453 one of the operands is a comparison and the other is a comparison, a
9454 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9455 code below would make the expression more complex. Change it to a
9456 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9457 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9459 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9460 || code == EQ_EXPR || code == NE_EXPR)
9461 && ((truth_value_p (TREE_CODE (arg0))
9462 && (truth_value_p (TREE_CODE (arg1))
9463 || (TREE_CODE (arg1) == BIT_AND_EXPR
9464 && integer_onep (TREE_OPERAND (arg1, 1)))))
9465 || (truth_value_p (TREE_CODE (arg1))
9466 && (truth_value_p (TREE_CODE (arg0))
9467 || (TREE_CODE (arg0) == BIT_AND_EXPR
9468 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9470 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9471 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9474 fold_convert_loc (loc, boolean_type_node, arg0),
9475 fold_convert_loc (loc, boolean_type_node, arg1));
9477 if (code == EQ_EXPR)
9478 tem = invert_truthvalue_loc (loc, tem);
9480 return fold_convert_loc (loc, type, tem);
9483 if (TREE_CODE_CLASS (code) == tcc_binary
9484 || TREE_CODE_CLASS (code) == tcc_comparison)
9486 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9488 tem = fold_build2_loc (loc, code, type,
9489 fold_convert_loc (loc, TREE_TYPE (op0),
9490 TREE_OPERAND (arg0, 1)), op1);
9491 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9494 if (TREE_CODE (arg1) == COMPOUND_EXPR
9495 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9497 tem = fold_build2_loc (loc, code, type, op0,
9498 fold_convert_loc (loc, TREE_TYPE (op1),
9499 TREE_OPERAND (arg1, 1)));
9500 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9504 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9506 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9508 /*cond_first_p=*/1);
9509 if (tem != NULL_TREE)
9513 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9515 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9517 /*cond_first_p=*/0);
9518 if (tem != NULL_TREE)
9526 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9527 if (TREE_CODE (arg0) == ADDR_EXPR
9528 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9530 tree iref = TREE_OPERAND (arg0, 0);
9531 return fold_build2 (MEM_REF, type,
9532 TREE_OPERAND (iref, 0),
9533 int_const_binop (PLUS_EXPR, arg1,
9534 TREE_OPERAND (iref, 1)));
9537 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9538 if (TREE_CODE (arg0) == ADDR_EXPR
9539 && handled_component_p (TREE_OPERAND (arg0, 0)))
9542 HOST_WIDE_INT coffset;
9543 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9547 return fold_build2 (MEM_REF, type,
9548 build_fold_addr_expr (base),
9549 int_const_binop (PLUS_EXPR, arg1,
9550 size_int (coffset)));
9555 case POINTER_PLUS_EXPR:
9556 /* 0 +p index -> (type)index */
9557 if (integer_zerop (arg0))
9558 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9560 /* PTR +p 0 -> PTR */
9561 if (integer_zerop (arg1))
9562 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9564 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9565 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9566 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9567 return fold_convert_loc (loc, type,
9568 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9569 fold_convert_loc (loc, sizetype,
9571 fold_convert_loc (loc, sizetype,
9574 /* (PTR +p B) +p A -> PTR +p (B + A) */
9575 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9578 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9579 tree arg00 = TREE_OPERAND (arg0, 0);
9580 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9581 arg01, fold_convert_loc (loc, sizetype, arg1));
9582 return fold_convert_loc (loc, type,
9583 fold_build_pointer_plus_loc (loc,
9587 /* PTR_CST +p CST -> CST1 */
9588 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9589 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9590 fold_convert_loc (loc, type, arg1));
9592 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9593 of the array. The loop optimizer sometimes produces this type of expression. */
9595 if (TREE_CODE (arg0) == ADDR_EXPR)
9597 tem = try_move_mult_to_index (loc, arg0,
9598 fold_convert_loc (loc, sizetype, arg1));
9600 return fold_convert_loc (loc, type, tem);
9606 /* A + (-B) -> A - B */
9607 if (TREE_CODE (arg1) == NEGATE_EXPR)
9608 return fold_build2_loc (loc, MINUS_EXPR, type,
9609 fold_convert_loc (loc, type, arg0),
9610 fold_convert_loc (loc, type,
9611 TREE_OPERAND (arg1, 0)));
9612 /* (-A) + B -> B - A */
9613 if (TREE_CODE (arg0) == NEGATE_EXPR
9614 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9615 return fold_build2_loc (loc, MINUS_EXPR, type,
9616 fold_convert_loc (loc, type, arg1),
9617 fold_convert_loc (loc, type,
9618 TREE_OPERAND (arg0, 0)));
9620 if (INTEGRAL_TYPE_P (type))
9622 /* Convert ~A + 1 to -A. */
9623 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9624 && integer_onep (arg1))
9625 return fold_build1_loc (loc, NEGATE_EXPR, type,
9626 fold_convert_loc (loc, type,
9627 TREE_OPERAND (arg0, 0)));
9630 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9631 && !TYPE_OVERFLOW_TRAPS (type))
9633 tree tem = TREE_OPERAND (arg0, 0);
9636 if (operand_equal_p (tem, arg1, 0))
9638 t1 = build_int_cst_type (type, -1);
9639 return omit_one_operand_loc (loc, type, t1, arg1);
9644 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9645 && !TYPE_OVERFLOW_TRAPS (type))
9647 tree tem = TREE_OPERAND (arg1, 0);
9650 if (operand_equal_p (arg0, tem, 0))
9652 t1 = build_int_cst_type (type, -1);
9653 return omit_one_operand_loc (loc, type, t1, arg0);
9657 /* X + (X / CST) * -CST is X % CST. */
9658 if (TREE_CODE (arg1) == MULT_EXPR
9659 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9660 && operand_equal_p (arg0,
9661 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9663 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9664 tree cst1 = TREE_OPERAND (arg1, 1);
9665 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9667 if (sum && integer_zerop (sum))
9668 return fold_convert_loc (loc, type,
9669 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9670 TREE_TYPE (arg0), arg0,
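/* A worked example (illustrative only): for integer x,
   x + (x / 16) * -16 matches this pattern and is folded to x % 16,
   since the two constants sum to zero.  */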
9675 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9676 same or one. Make sure type is not saturating.
9677 fold_plusminus_mult_expr will re-associate. */
9678 if ((TREE_CODE (arg0) == MULT_EXPR
9679 || TREE_CODE (arg1) == MULT_EXPR)
9680 && !TYPE_SATURATING (type)
9681 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9683 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9688 if (! FLOAT_TYPE_P (type))
9690 if (integer_zerop (arg1))
9691 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9693 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9694 with a constant, and the two constants have no bits in common,
9695 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications. */
9697 if (TREE_CODE (arg0) == BIT_AND_EXPR
9698 && TREE_CODE (arg1) == BIT_AND_EXPR
9699 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9700 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9701 && integer_zerop (const_binop (BIT_AND_EXPR,
9702 TREE_OPERAND (arg0, 1),
9703 TREE_OPERAND (arg1, 1))))
9705 code = BIT_IOR_EXPR;
9709 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9710 (plus (plus (mult) (mult)) (foo)) so that we can
9711 take advantage of the factoring cases below. */
9712 if (TYPE_OVERFLOW_WRAPS (type)
9713 && (((TREE_CODE (arg0) == PLUS_EXPR
9714 || TREE_CODE (arg0) == MINUS_EXPR)
9715 && TREE_CODE (arg1) == MULT_EXPR)
9716 || ((TREE_CODE (arg1) == PLUS_EXPR
9717 || TREE_CODE (arg1) == MINUS_EXPR)
9718 && TREE_CODE (arg0) == MULT_EXPR)))
9720 tree parg0, parg1, parg, marg;
9721 enum tree_code pcode;
9723 if (TREE_CODE (arg1) == MULT_EXPR)
9724 parg = arg0, marg = arg1;
9726 parg = arg1, marg = arg0;
9727 pcode = TREE_CODE (parg);
9728 parg0 = TREE_OPERAND (parg, 0);
9729 parg1 = TREE_OPERAND (parg, 1);
9733 if (TREE_CODE (parg0) == MULT_EXPR
9734 && TREE_CODE (parg1) != MULT_EXPR)
9735 return fold_build2_loc (loc, pcode, type,
9736 fold_build2_loc (loc, PLUS_EXPR, type,
9737 fold_convert_loc (loc, type,
9739 fold_convert_loc (loc, type,
9741 fold_convert_loc (loc, type, parg1));
9742 if (TREE_CODE (parg0) != MULT_EXPR
9743 && TREE_CODE (parg1) == MULT_EXPR)
9745 fold_build2_loc (loc, PLUS_EXPR, type,
9746 fold_convert_loc (loc, type, parg0),
9747 fold_build2_loc (loc, pcode, type,
9748 fold_convert_loc (loc, type, marg),
9749 fold_convert_loc (loc, type,
9755 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9756 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9757 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9759 /* Likewise if the operands are reversed. */
9760 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9761 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9763 /* Convert X + -C into X - C. */
9764 if (TREE_CODE (arg1) == REAL_CST
9765 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9767 tem = fold_negate_const (arg1, type);
9768 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9769 return fold_build2_loc (loc, MINUS_EXPR, type,
9770 fold_convert_loc (loc, type, arg0),
9771 fold_convert_loc (loc, type, tem));
9774 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9775 to __complex__ ( x, y ). This is not the same for SNaNs or
9776 if signed zeros are involved. */
9777 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9778 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9779 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9781 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9782 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9783 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9784 bool arg0rz = false, arg0iz = false;
9785 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9786 || (arg0i && (arg0iz = real_zerop (arg0i))))
9788 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9789 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9790 if (arg0rz && arg1i && real_zerop (arg1i))
9792 tree rp = arg1r ? arg1r
9793 : build1 (REALPART_EXPR, rtype, arg1);
9794 tree ip = arg0i ? arg0i
9795 : build1 (IMAGPART_EXPR, rtype, arg0);
9796 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9798 else if (arg0iz && arg1r && real_zerop (arg1r))
9800 tree rp = arg0r ? arg0r
9801 : build1 (REALPART_EXPR, rtype, arg0);
9802 tree ip = arg1i ? arg1i
9803 : build1 (IMAGPART_EXPR, rtype, arg1);
9804 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9809 if (flag_unsafe_math_optimizations
9810 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9811 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9812 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9815 /* Convert x+x into x*2.0. */
9816 if (operand_equal_p (arg0, arg1, 0)
9817 && SCALAR_FLOAT_TYPE_P (type))
9818 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9819 build_real (type, dconst2));
9821 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9822 We associate floats only if the user has specified
9823 -fassociative-math. */
9824 if (flag_associative_math
9825 && TREE_CODE (arg1) == PLUS_EXPR
9826 && TREE_CODE (arg0) != MULT_EXPR)
9828 tree tree10 = TREE_OPERAND (arg1, 0);
9829 tree tree11 = TREE_OPERAND (arg1, 1);
9830 if (TREE_CODE (tree11) == MULT_EXPR
9831 && TREE_CODE (tree10) == MULT_EXPR)
9834 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9835 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9838 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9839 We associate floats only if the user has specified
9840 -fassociative-math. */
9841 if (flag_associative_math
9842 && TREE_CODE (arg0) == PLUS_EXPR
9843 && TREE_CODE (arg1) != MULT_EXPR)
9845 tree tree00 = TREE_OPERAND (arg0, 0);
9846 tree tree01 = TREE_OPERAND (arg0, 1);
9847 if (TREE_CODE (tree01) == MULT_EXPR
9848 && TREE_CODE (tree00) == MULT_EXPR)
9851 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9852 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9858 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9859 is a rotate of A by C1 bits. */
9860 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9861 is a rotate of A by B bits. */
9863 enum tree_code code0, code1;
9865 code0 = TREE_CODE (arg0);
9866 code1 = TREE_CODE (arg1);
9867 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9868 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9869 && operand_equal_p (TREE_OPERAND (arg0, 0),
9870 TREE_OPERAND (arg1, 0), 0)
9871 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9872 TYPE_UNSIGNED (rtype))
9873 /* Only create rotates in complete modes. Other cases are not
9874 expanded properly. */
9875 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9877 tree tree01, tree11;
9878 enum tree_code code01, code11;
9880 tree01 = TREE_OPERAND (arg0, 1);
9881 tree11 = TREE_OPERAND (arg1, 1);
9882 STRIP_NOPS (tree01);
9883 STRIP_NOPS (tree11);
9884 code01 = TREE_CODE (tree01);
9885 code11 = TREE_CODE (tree11);
9886 if (code01 == INTEGER_CST
9887 && code11 == INTEGER_CST
9888 && TREE_INT_CST_HIGH (tree01) == 0
9889 && TREE_INT_CST_HIGH (tree11) == 0
9890 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9891 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9893 tem = build2_loc (loc, LROTATE_EXPR,
9894 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9895 TREE_OPERAND (arg0, 0),
9896 code0 == LSHIFT_EXPR ? tree01 : tree11);
9897 return fold_convert_loc (loc, type, tem);
9899 else if (code11 == MINUS_EXPR)
9901 tree tree110, tree111;
9902 tree110 = TREE_OPERAND (tree11, 0);
9903 tree111 = TREE_OPERAND (tree11, 1);
9904 STRIP_NOPS (tree110);
9905 STRIP_NOPS (tree111);
9906 if (TREE_CODE (tree110) == INTEGER_CST
9907 && 0 == compare_tree_int (tree110,
9909 (TREE_TYPE (TREE_OPERAND
9911 && operand_equal_p (tree01, tree111, 0))
9913 fold_convert_loc (loc, type,
9914 build2 ((code0 == LSHIFT_EXPR
9917 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9918 TREE_OPERAND (arg0, 0), tree01));
9920 else if (code01 == MINUS_EXPR)
9922 tree tree010, tree011;
9923 tree010 = TREE_OPERAND (tree01, 0);
9924 tree011 = TREE_OPERAND (tree01, 1);
9925 STRIP_NOPS (tree010);
9926 STRIP_NOPS (tree011);
9927 if (TREE_CODE (tree010) == INTEGER_CST
9928 && 0 == compare_tree_int (tree010,
9930 (TREE_TYPE (TREE_OPERAND
9932 && operand_equal_p (tree11, tree011, 0))
9933 return fold_convert_loc
9935 build2 ((code0 != LSHIFT_EXPR
9938 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9939 TREE_OPERAND (arg0, 0), tree11));
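/* A concrete instance (illustrative only): for 32-bit unsigned x,
   (x << 3) + (x >> 29) matches the constant form above and is folded
   into a left rotate of x by 3, and (x << n) + (x >> (32 - n))
   likewise becomes a rotate by n.  */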
9945 /* In most languages, we can't associate operations on floats through
9946 parentheses. Rather than remember where the parentheses were, we
9947 don't associate floats at all, unless the user has specified -fassociative-math.
9949 And, we need to make sure the type is not saturating. */
9951 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9952 && !TYPE_SATURATING (type))
9954 tree var0, con0, lit0, minus_lit0;
9955 tree var1, con1, lit1, minus_lit1;
9958 /* Split both trees into variables, constants, and literals. Then
9959 associate each group together, the constants with literals,
9960 then the result with variables. This increases the chances of
9961 literals being recombined later and of generating relocatable
9962 expressions for the sum of a constant and literal. */
9963 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9964 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9965 code == MINUS_EXPR);
9967 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9968 if (code == MINUS_EXPR)
9971 /* With undefined overflow we can only associate constants with one
9972 variable, and constants whose association doesn't overflow. */
9973 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9974 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9981 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9982 tmp0 = TREE_OPERAND (tmp0, 0);
9983 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9984 tmp1 = TREE_OPERAND (tmp1, 0);
9985 /* The only case we can still associate with two variables
9986 is if they are the same, modulo negation. */
9987 if (!operand_equal_p (tmp0, tmp1, 0))
9991 if (ok && lit0 && lit1)
9993 tree tmp0 = fold_convert (type, lit0);
9994 tree tmp1 = fold_convert (type, lit1);
9996 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
9997 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10002 /* Only do something if we found more than two objects. Otherwise,
10003 nothing has changed and we risk infinite recursion. */
10005 && (2 < ((var0 != 0) + (var1 != 0)
10006 + (con0 != 0) + (con1 != 0)
10007 + (lit0 != 0) + (lit1 != 0)
10008 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10010 var0 = associate_trees (loc, var0, var1, code, type);
10011 con0 = associate_trees (loc, con0, con1, code, type);
10012 lit0 = associate_trees (loc, lit0, lit1, code, type);
10013 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10015 /* Preserve the MINUS_EXPR if the negative part of the literal is
10016 greater than the positive part. Otherwise, the multiplicative
10017 folding code (i.e. extract_muldiv) may be fooled in case
10018 unsigned constants are subtracted, like in the following
10019 example: ((X*2 + 4) - 8U)/2. */
10020 if (minus_lit0 && lit0)
10022 if (TREE_CODE (lit0) == INTEGER_CST
10023 && TREE_CODE (minus_lit0) == INTEGER_CST
10024 && tree_int_cst_lt (lit0, minus_lit0))
10026 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10032 lit0 = associate_trees (loc, lit0, minus_lit0,
10041 fold_convert_loc (loc, type,
10042 associate_trees (loc, var0, minus_lit0,
10043 MINUS_EXPR, type));
10046 con0 = associate_trees (loc, con0, minus_lit0,
10049 fold_convert_loc (loc, type,
10050 associate_trees (loc, var0, con0,
10055 con0 = associate_trees (loc, con0, lit0, code, type);
10057 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10065 /* Pointer simplifications for subtraction, simple reassociations. */
10066 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10068 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10069 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10070 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10072 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10073 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10074 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10075 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10076 return fold_build2_loc (loc, PLUS_EXPR, type,
10077 fold_build2_loc (loc, MINUS_EXPR, type,
10079 fold_build2_loc (loc, MINUS_EXPR, type,
10082 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10083 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10085 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10086 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10087 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10088 fold_convert_loc (loc, type, arg1));
10090 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10093 /* A - (-B) -> A + B */
10094 if (TREE_CODE (arg1) == NEGATE_EXPR)
10095 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10096 fold_convert_loc (loc, type,
10097 TREE_OPERAND (arg1, 0)));
10098 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10099 if (TREE_CODE (arg0) == NEGATE_EXPR
10100 && (FLOAT_TYPE_P (type)
10101 || INTEGRAL_TYPE_P (type))
10102 && negate_expr_p (arg1)
10103 && reorder_operands_p (arg0, arg1))
10104 return fold_build2_loc (loc, MINUS_EXPR, type,
10105 fold_convert_loc (loc, type,
10106 negate_expr (arg1)),
10107 fold_convert_loc (loc, type,
10108 TREE_OPERAND (arg0, 0)));
10109 /* Convert -A - 1 to ~A. */
10110 if (INTEGRAL_TYPE_P (type)
10111 && TREE_CODE (arg0) == NEGATE_EXPR
10112 && integer_onep (arg1)
10113 && !TYPE_OVERFLOW_TRAPS (type))
10114 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10115 fold_convert_loc (loc, type,
10116 TREE_OPERAND (arg0, 0)));
10118 /* Convert -1 - A to ~A. */
10119 if (INTEGRAL_TYPE_P (type)
10120 && integer_all_onesp (arg0))
10121 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10124 /* X - (X / CST) * CST is X % CST. */
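/* E.g. 17 - (17 / 5) * 5 == 17 - 15 == 2 == 17 % 5.  */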
10125 if (INTEGRAL_TYPE_P (type)
10126 && TREE_CODE (arg1) == MULT_EXPR
10127 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10128 && operand_equal_p (arg0,
10129 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10130 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10131 TREE_OPERAND (arg1, 1), 0))
10133 fold_convert_loc (loc, type,
10134 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10135 arg0, TREE_OPERAND (arg1, 1)));
10137 if (! FLOAT_TYPE_P (type))
10139 if (integer_zerop (arg0))
10140 return negate_expr (fold_convert_loc (loc, type, arg1));
10141 if (integer_zerop (arg1))
10142 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10144 /* Fold A - (A & B) into ~B & A. */
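/* The bits of A & B are a subset of the bits of A, so the subtraction
   cannot borrow; e.g. with A == 0b1100 and B == 0b1010,
   A - (A & B) == 12 - 8 == 4 == ~B & A.  */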
10145 if (!TREE_SIDE_EFFECTS (arg0)
10146 && TREE_CODE (arg1) == BIT_AND_EXPR)
10148 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10150 tree arg10 = fold_convert_loc (loc, type,
10151 TREE_OPERAND (arg1, 0));
10152 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10153 fold_build1_loc (loc, BIT_NOT_EXPR,
10155 fold_convert_loc (loc, type, arg0));
10157 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10159 tree arg11 = fold_convert_loc (loc,
10160 type, TREE_OPERAND (arg1, 1));
10161 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10162 fold_build1_loc (loc, BIT_NOT_EXPR,
10164 fold_convert_loc (loc, type, arg0));
10168 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10169 any power of 2 minus 1. */
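/* E.g. with B == 7 and A == 13: (A & ~B) - (A & B) == 8 - 5 == 3, and
   (A ^ B) - B == 10 - 7 == 3.  */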
10170 if (TREE_CODE (arg0) == BIT_AND_EXPR
10171 && TREE_CODE (arg1) == BIT_AND_EXPR
10172 && operand_equal_p (TREE_OPERAND (arg0, 0),
10173 TREE_OPERAND (arg1, 0), 0))
10175 tree mask0 = TREE_OPERAND (arg0, 1);
10176 tree mask1 = TREE_OPERAND (arg1, 1);
10177 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10179 if (operand_equal_p (tem, mask1, 0))
10181 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10182 TREE_OPERAND (arg0, 0), mask1);
10183 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10188 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10189 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10190 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10192 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10193 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10194 (-ARG1 + ARG0) reduces to -ARG1. */
10195 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10196 return negate_expr (fold_convert_loc (loc, type, arg1));
10198 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10199 __complex__ ( x, -y ). This is not the same for SNaNs or if
10200 signed zeros are involved. */
10201 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10202 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10203 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10205 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10206 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10207 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10208 bool arg0rz = false, arg0iz = false;
10209 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10210 || (arg0i && (arg0iz = real_zerop (arg0i))))
10212 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10213 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10214 if (arg0rz && arg1i && real_zerop (arg1i))
10216 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10218 : build1 (REALPART_EXPR, rtype, arg1));
10219 tree ip = arg0i ? arg0i
10220 : build1 (IMAGPART_EXPR, rtype, arg0);
10221 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10223 else if (arg0iz && arg1r && real_zerop (arg1r))
10225 tree rp = arg0r ? arg0r
10226 : build1 (REALPART_EXPR, rtype, arg0);
10227 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10229 : build1 (IMAGPART_EXPR, rtype, arg1));
10230 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10235 /* Fold &x - &x. This can happen from &x.foo - &x.
10236 This is unsafe for certain floats even in non-IEEE formats.
10237 In IEEE, it is unsafe because it does wrong for NaNs.
10238 Also note that operand_equal_p is always false if an operand is volatile. */
10241 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10242 && operand_equal_p (arg0, arg1, 0))
10243 return build_zero_cst (type);
10245 /* A - B -> A + (-B) if B is easily negatable. */
10246 if (negate_expr_p (arg1)
10247 && ((FLOAT_TYPE_P (type)
10248 /* Avoid this transformation if B is a positive REAL_CST. */
10249 && (TREE_CODE (arg1) != REAL_CST
10250 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10251 || INTEGRAL_TYPE_P (type)))
10252 return fold_build2_loc (loc, PLUS_EXPR, type,
10253 fold_convert_loc (loc, type, arg0),
10254 fold_convert_loc (loc, type,
10255 negate_expr (arg1)));
10257 /* Try folding difference of addresses. */
10259 HOST_WIDE_INT diff;
10261 if ((TREE_CODE (arg0) == ADDR_EXPR
10262 || TREE_CODE (arg1) == ADDR_EXPR)
10263 && ptr_difference_const (arg0, arg1, &diff))
10264 return build_int_cst_type (type, diff);
10267 /* Fold &a[i] - &a[j] to i-j. */
10268 if (TREE_CODE (arg0) == ADDR_EXPR
10269 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10270 && TREE_CODE (arg1) == ADDR_EXPR
10271 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10273 tree aref0 = TREE_OPERAND (arg0, 0);
10274 tree aref1 = TREE_OPERAND (arg1, 0);
10275 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10276 TREE_OPERAND (aref1, 0), 0))
10278 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10279 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10280 tree esz = array_ref_element_size (aref0);
10281 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10282 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10283 fold_convert_loc (loc, type, esz));
10288 if (FLOAT_TYPE_P (type)
10289 && flag_unsafe_math_optimizations
10290 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10291 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10292 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10295 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10296 same or one. Make sure type is not saturating.
10297 fold_plusminus_mult_expr will re-associate. */
10298 if ((TREE_CODE (arg0) == MULT_EXPR
10299 || TREE_CODE (arg1) == MULT_EXPR)
10300 && !TYPE_SATURATING (type)
10301 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10303 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10311 /* (-A) * (-B) -> A * B */
10312 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10313 return fold_build2_loc (loc, MULT_EXPR, type,
10314 fold_convert_loc (loc, type,
10315 TREE_OPERAND (arg0, 0)),
10316 fold_convert_loc (loc, type,
10317 negate_expr (arg1)));
10318 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10319 return fold_build2_loc (loc, MULT_EXPR, type,
10320 fold_convert_loc (loc, type,
10321 negate_expr (arg0)),
10322 fold_convert_loc (loc, type,
10323 TREE_OPERAND (arg1, 0)));
10325 if (! FLOAT_TYPE_P (type))
10327 if (integer_zerop (arg1))
10328 return omit_one_operand_loc (loc, type, arg1, arg0);
10329 if (integer_onep (arg1))
10330 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10331 /* Transform x * -1 into -x. Make sure to do the negation
10332 on the original operand with conversions not stripped
10333 because we can only strip non-sign-changing conversions. */
10334 if (integer_all_onesp (arg1))
10335 return fold_convert_loc (loc, type, negate_expr (op0));
10336 /* Transform x * -C into -x * C if x is easily negatable. */
10337 if (TREE_CODE (arg1) == INTEGER_CST
10338 && tree_int_cst_sgn (arg1) == -1
10339 && negate_expr_p (arg0)
10340 && (tem = negate_expr (arg1)) != arg1
10341 && !TREE_OVERFLOW (tem))
10342 return fold_build2_loc (loc, MULT_EXPR, type,
10343 fold_convert_loc (loc, type,
10344 negate_expr (arg0)),
10347 /* (a * (1 << b)) is (a << b) */
10348 if (TREE_CODE (arg1) == LSHIFT_EXPR
10349 && integer_onep (TREE_OPERAND (arg1, 0)))
10350 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10351 TREE_OPERAND (arg1, 1));
10352 if (TREE_CODE (arg0) == LSHIFT_EXPR
10353 && integer_onep (TREE_OPERAND (arg0, 0)))
10354 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10355 TREE_OPERAND (arg0, 1));
10357 /* (A + A) * C -> A * 2 * C */
10358 if (TREE_CODE (arg0) == PLUS_EXPR
10359 && TREE_CODE (arg1) == INTEGER_CST
10360 && operand_equal_p (TREE_OPERAND (arg0, 0),
10361 TREE_OPERAND (arg0, 1), 0))
10362 return fold_build2_loc (loc, MULT_EXPR, type,
10363 omit_one_operand_loc (loc, type,
10364 TREE_OPERAND (arg0, 0),
10365 TREE_OPERAND (arg0, 1)),
10366 fold_build2_loc (loc, MULT_EXPR, type,
10367 build_int_cst (type, 2) , arg1));
10369 strict_overflow_p = false;
10370 if (TREE_CODE (arg1) == INTEGER_CST
10371 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10372 &strict_overflow_p)))
10374 if (strict_overflow_p)
10375 fold_overflow_warning (("assuming signed overflow does not "
10376 "occur when simplifying "
10378 WARN_STRICT_OVERFLOW_MISC);
10379 return fold_convert_loc (loc, type, tem);
10382 /* Optimize z * conj(z) for integer complex numbers. */
10383 if (TREE_CODE (arg0) == CONJ_EXPR
10384 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10385 return fold_mult_zconjz (loc, type, arg1);
10386 if (TREE_CODE (arg1) == CONJ_EXPR
10387 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10388 return fold_mult_zconjz (loc, type, arg0);
10392 /* Maybe fold x * 0 to 0. The expressions aren't the same
10393 when x is NaN, since x * 0 is also NaN. Nor are they the
10394 same in modes with signed zeros, since multiplying a
10395 negative value by 0 gives -0, not +0. */
10396 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10397 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10398 && real_zerop (arg1))
10399 return omit_one_operand_loc (loc, type, arg1, arg0);
10400 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10401 Likewise for complex arithmetic with signed zeros. */
10402 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10403 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10404 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10405 && real_onep (arg1))
10406 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10408 /* Transform x * -1.0 into -x. */
10409 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10410 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10411 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10412 && real_minus_onep (arg1))
10413 return fold_convert_loc (loc, type, negate_expr (arg0));
10415 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10416 the result for floating point types due to rounding so it is applied
10417 only if -fassociative-math was specified. */
10418 if (flag_associative_math
10419 && TREE_CODE (arg0) == RDIV_EXPR
10420 && TREE_CODE (arg1) == REAL_CST
10421 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10423 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10426 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10427 TREE_OPERAND (arg0, 1));
10430 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10431 if (operand_equal_p (arg0, arg1, 0))
10433 tree tem = fold_strip_sign_ops (arg0);
10434 if (tem != NULL_TREE)
10436 tem = fold_convert_loc (loc, type, tem);
10437 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10441 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10442 This is not the same for NaNs or if signed zeros are involved. */
10444 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10445 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10446 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10447 && TREE_CODE (arg1) == COMPLEX_CST
10448 && real_zerop (TREE_REALPART (arg1)))
10450 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10451 if (real_onep (TREE_IMAGPART (arg1)))
10453 fold_build2_loc (loc, COMPLEX_EXPR, type,
10454 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10456 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10457 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10459 fold_build2_loc (loc, COMPLEX_EXPR, type,
10460 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10461 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10465 /* Optimize z * conj(z) for floating point complex numbers.
10466 Guarded by flag_unsafe_math_optimizations as non-finite
10467 imaginary components don't produce scalar results. */
10468 if (flag_unsafe_math_optimizations
10469 && TREE_CODE (arg0) == CONJ_EXPR
10470 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10471 return fold_mult_zconjz (loc, type, arg1);
10472 if (flag_unsafe_math_optimizations
10473 && TREE_CODE (arg1) == CONJ_EXPR
10474 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10475 return fold_mult_zconjz (loc, type, arg0);
10477 if (flag_unsafe_math_optimizations)
10479 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10480 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10482 /* Optimizations of root(...)*root(...). */
10483 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10486 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10487 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10489 /* Optimize sqrt(x)*sqrt(x) as x. */
10490 if (BUILTIN_SQRT_P (fcode0)
10491 && operand_equal_p (arg00, arg10, 0)
10492 && ! HONOR_SNANS (TYPE_MODE (type)))
10495 /* Optimize root(x)*root(y) as root(x*y). */
10496 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10497 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10498 return build_call_expr_loc (loc, rootfn, 1, arg);
10501 /* Optimize expN(x)*expN(y) as expN(x+y). */
10502 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10504 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10505 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10506 CALL_EXPR_ARG (arg0, 0),
10507 CALL_EXPR_ARG (arg1, 0));
10508 return build_call_expr_loc (loc, expfn, 1, arg);
10511 /* Optimizations of pow(...)*pow(...). */
10512 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10513 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10514 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10516 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10517 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10518 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10519 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10521 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10522 if (operand_equal_p (arg01, arg11, 0))
10524 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10525 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10527 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10530 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10531 if (operand_equal_p (arg00, arg10, 0))
10533 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10534 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10536 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10540 /* Optimize tan(x)*cos(x) as sin(x). */
10541 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10542 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10543 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10544 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10545 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10546 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10547 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10548 CALL_EXPR_ARG (arg1, 0), 0))
10550 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10552 if (sinfn != NULL_TREE)
10553 return build_call_expr_loc (loc, sinfn, 1,
10554 CALL_EXPR_ARG (arg0, 0));
10557 /* Optimize x*pow(x,c) as pow(x,c+1). */
10558 if (fcode1 == BUILT_IN_POW
10559 || fcode1 == BUILT_IN_POWF
10560 || fcode1 == BUILT_IN_POWL)
10562 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10563 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10564 if (TREE_CODE (arg11) == REAL_CST
10565 && !TREE_OVERFLOW (arg11)
10566 && operand_equal_p (arg0, arg10, 0))
10568 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10572 c = TREE_REAL_CST (arg11);
10573 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10574 arg = build_real (type, c);
10575 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10579 /* Optimize pow(x,c)*x as pow(x,c+1). */
10580 if (fcode0 == BUILT_IN_POW
10581 || fcode0 == BUILT_IN_POWF
10582 || fcode0 == BUILT_IN_POWL)
10584 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10585 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10586 if (TREE_CODE (arg01) == REAL_CST
10587 && !TREE_OVERFLOW (arg01)
10588 && operand_equal_p (arg1, arg00, 0))
10590 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10594 c = TREE_REAL_CST (arg01);
10595 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10596 arg = build_real (type, c);
10597 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10601 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10602 if (!in_gimple_form
10603 && optimize_function_for_speed_p (cfun)
10604 && operand_equal_p (arg0, arg1, 0))
10606 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10610 tree arg = build_real (type, dconst2);
10611 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10620 if (integer_all_onesp (arg1))
10621 return omit_one_operand_loc (loc, type, arg1, arg0);
10622 if (integer_zerop (arg1))
10623 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10624 if (operand_equal_p (arg0, arg1, 0))
10625 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10627 /* ~X | X is -1. */
10628 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10629 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10631 t1 = build_zero_cst (type);
10632 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10633 return omit_one_operand_loc (loc, type, t1, arg1);
10636 /* X | ~X is -1. */
10637 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10638 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10640 t1 = build_zero_cst (type);
10641 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10642 return omit_one_operand_loc (loc, type, t1, arg0);
10645 /* Canonicalize (X & C1) | C2. */
10646 if (TREE_CODE (arg0) == BIT_AND_EXPR
10647 && TREE_CODE (arg1) == INTEGER_CST
10648 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10650 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10651 int width = TYPE_PRECISION (type), w;
10652 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10653 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10654 hi2 = TREE_INT_CST_HIGH (arg1);
10655 lo2 = TREE_INT_CST_LOW (arg1);
10657 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10658 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10659 return omit_one_operand_loc (loc, type, arg1,
10660 TREE_OPERAND (arg0, 0));
10662 if (width > HOST_BITS_PER_WIDE_INT)
10664 mhi = (unsigned HOST_WIDE_INT) -1
10665 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10671 mlo = (unsigned HOST_WIDE_INT) -1
10672 >> (HOST_BITS_PER_WIDE_INT - width);
10675 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10676 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10677 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10678 TREE_OPERAND (arg0, 0), arg1);
10680 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10681 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10682 mode which allows further optimizations. */
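/* E.g. (X & 0x3c) | 0x0f can become (X & 0x30) | 0x0f, since the bits C2
   already provides are dropped from C1; but (X & 0xf0) | 0x0f instead
   becomes (X & 0xff) | 0x0f, because 0xff is a mode mask that may fold
   further (e.g. into a zero extension).  */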
10689 for (w = BITS_PER_UNIT;
10690 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10693 unsigned HOST_WIDE_INT mask
10694 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10695 if (((lo1 | lo2) & mask) == mask
10696 && (lo1 & ~mask) == 0 && hi1 == 0)
10703 if (hi3 != hi1 || lo3 != lo1)
10704 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10705 fold_build2_loc (loc, BIT_AND_EXPR, type,
10706 TREE_OPERAND (arg0, 0),
10707 build_int_cst_wide (type,
10712 /* (X & Y) | Y is (X, Y). */
10713 if (TREE_CODE (arg0) == BIT_AND_EXPR
10714 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10715 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10716 /* (X & Y) | X is (Y, X). */
10717 if (TREE_CODE (arg0) == BIT_AND_EXPR
10718 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10719 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10720 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10721 /* X | (X & Y) is (Y, X). */
10722 if (TREE_CODE (arg1) == BIT_AND_EXPR
10723 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10724 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10725 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10726 /* X | (Y & X) is (Y, X). */
10727 if (TREE_CODE (arg1) == BIT_AND_EXPR
10728 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10729 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10730 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10732 /* (X & ~Y) | (~X & Y) is X ^ Y */
10733 if (TREE_CODE (arg0) == BIT_AND_EXPR
10734 && TREE_CODE (arg1) == BIT_AND_EXPR)
10736 tree a0, a1, l0, l1, n0, n1;
10738 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10739 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10741 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10742 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10744 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10745 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10747 if ((operand_equal_p (n0, a0, 0)
10748 && operand_equal_p (n1, a1, 0))
10749 || (operand_equal_p (n0, a1, 0)
10750 && operand_equal_p (n1, a0, 0)))
10751 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
10754 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10755 if (t1 != NULL_TREE)
10758 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10760 This results in more efficient code for machines without a NAND
10761 instruction. Combine will canonicalize to the first form
10762 which will allow use of NAND instructions provided by the
10763 backend if they exist. */
10764 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10765 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10768 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10769 build2 (BIT_AND_EXPR, type,
10770 fold_convert_loc (loc, type,
10771 TREE_OPERAND (arg0, 0)),
10772 fold_convert_loc (loc, type,
10773 TREE_OPERAND (arg1, 0))));
10776 /* See if this can be simplified into a rotate first. If that
10777 is unsuccessful continue in the association code. */
10781 if (integer_zerop (arg1))
10782 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10783 if (integer_all_onesp (arg1))
10784 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10785 if (operand_equal_p (arg0, arg1, 0))
10786 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10788 /* ~X ^ X is -1. */
10789 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10790 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10792 t1 = build_zero_cst (type);
10793 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10794 return omit_one_operand_loc (loc, type, t1, arg1);
10797 /* X ^ ~X is -1. */
10798 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10799 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10801 t1 = build_zero_cst (type);
10802 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10803 return omit_one_operand_loc (loc, type, t1, arg0);
10806 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10807 with a constant, and the two constants have no bits in common,
10808 we should treat this as a BIT_IOR_EXPR since this may produce more
10809 simplifications. */
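/* E.g. (X & 0x0f) ^ (Y & 0xf0): the two operands can have no set bits in
   common, so the XOR is equivalent to (X & 0x0f) | (Y & 0xf0).  */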
10810 if (TREE_CODE (arg0) == BIT_AND_EXPR
10811 && TREE_CODE (arg1) == BIT_AND_EXPR
10812 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10813 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10814 && integer_zerop (const_binop (BIT_AND_EXPR,
10815 TREE_OPERAND (arg0, 1),
10816 TREE_OPERAND (arg1, 1))))
10818 code = BIT_IOR_EXPR;
10822 /* (X | Y) ^ X -> Y & ~X. */
10823 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10824 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10826 tree t2 = TREE_OPERAND (arg0, 1);
10827 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10829 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10830 fold_convert_loc (loc, type, t2),
10831 fold_convert_loc (loc, type, t1));
10835 /* (Y | X) ^ X -> Y & ~X. */
10836 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10837 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10839 tree t2 = TREE_OPERAND (arg0, 0);
10840 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10842 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10843 fold_convert_loc (loc, type, t2),
10844 fold_convert_loc (loc, type, t1));
10848 /* X ^ (X | Y) -> Y & ~X. */
10849 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10850 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10852 tree t2 = TREE_OPERAND (arg1, 1);
10853 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10855 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10856 fold_convert_loc (loc, type, t2),
10857 fold_convert_loc (loc, type, t1));
10861 /* X ^ (Y | X) -> Y & ~X. */
10862 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10863 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10865 tree t2 = TREE_OPERAND (arg1, 0);
10866 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10868 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10869 fold_convert_loc (loc, type, t2),
10870 fold_convert_loc (loc, type, t1));
10874 /* Convert ~X ^ ~Y to X ^ Y. */
10875 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10876 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10877 return fold_build2_loc (loc, code, type,
10878 fold_convert_loc (loc, type,
10879 TREE_OPERAND (arg0, 0)),
10880 fold_convert_loc (loc, type,
10881 TREE_OPERAND (arg1, 0)));
10883 /* Convert ~X ^ C to X ^ ~C. */
10884 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10885 && TREE_CODE (arg1) == INTEGER_CST)
10886 return fold_build2_loc (loc, code, type,
10887 fold_convert_loc (loc, type,
10888 TREE_OPERAND (arg0, 0)),
10889 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10891 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10892 if (TREE_CODE (arg0) == BIT_AND_EXPR
10893 && integer_onep (TREE_OPERAND (arg0, 1))
10894 && integer_onep (arg1))
10895 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10896 build_int_cst (TREE_TYPE (arg0), 0));
10898 /* Fold (X & Y) ^ Y as ~X & Y. */
10899 if (TREE_CODE (arg0) == BIT_AND_EXPR
10900 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10902 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10903 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10904 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10905 fold_convert_loc (loc, type, arg1));
10907 /* Fold (X & Y) ^ X as ~Y & X. */
10908 if (TREE_CODE (arg0) == BIT_AND_EXPR
10909 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10910 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10912 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10913 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10914 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10915 fold_convert_loc (loc, type, arg1));
10917 /* Fold X ^ (X & Y) as X & ~Y. */
10918 if (TREE_CODE (arg1) == BIT_AND_EXPR
10919 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10921 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10922 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10923 fold_convert_loc (loc, type, arg0),
10924 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10926 /* Fold X ^ (Y & X) as ~Y & X. */
10927 if (TREE_CODE (arg1) == BIT_AND_EXPR
10928 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10929 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10931 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10932 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10933 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10934 fold_convert_loc (loc, type, arg0));
10937 /* See if this can be simplified into a rotate first. If that
10938 is unsuccessful continue in the association code. */
10942 if (integer_all_onesp (arg1))
10943 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10944 if (integer_zerop (arg1))
10945 return omit_one_operand_loc (loc, type, arg1, arg0);
10946 if (operand_equal_p (arg0, arg1, 0))
10947 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10949 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
10950 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
10951 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
10952 || (TREE_CODE (arg0) == EQ_EXPR
10953 && integer_zerop (TREE_OPERAND (arg0, 1))))
10954 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10955 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10957 /* X & ~X , X & (X == 0), and X & !X are always zero. */
10958 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
10959 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
10960 || (TREE_CODE (arg1) == EQ_EXPR
10961 && integer_zerop (TREE_OPERAND (arg1, 1))))
10962 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10963 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10965 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10966 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10967 && TREE_CODE (arg1) == INTEGER_CST
10968 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10970 tree tmp1 = fold_convert_loc (loc, type, arg1);
10971 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10972 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10973 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10974 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10976 fold_convert_loc (loc, type,
10977 fold_build2_loc (loc, BIT_IOR_EXPR,
10978 type, tmp2, tmp3));
10981 /* (X | Y) & Y is (X, Y). */
10982 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10983 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10984 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10985 /* (X | Y) & X is (Y, X). */
10986 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10987 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10988 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10989 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10990 /* X & (X | Y) is (Y, X). */
10991 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10992 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10993 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10994 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10995 /* X & (Y | X) is (Y, X). */
10996 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10997 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10998 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10999 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11001 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11002 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11003 && integer_onep (TREE_OPERAND (arg0, 1))
11004 && integer_onep (arg1))
11006 tem = TREE_OPERAND (arg0, 0);
11007 return fold_build2_loc (loc, EQ_EXPR, type,
11008 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11009 build_int_cst (TREE_TYPE (tem), 1)),
11010 build_int_cst (TREE_TYPE (tem), 0));
11012 /* Fold ~X & 1 as (X & 1) == 0. */
11013 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11014 && integer_onep (arg1))
11016 tem = TREE_OPERAND (arg0, 0);
11017 return fold_build2_loc (loc, EQ_EXPR, type,
11018 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11019 build_int_cst (TREE_TYPE (tem), 1)),
11020 build_int_cst (TREE_TYPE (tem), 0));
11022 /* Fold !X & 1 as X == 0. */
11023 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11024 && integer_onep (arg1))
11026 tem = TREE_OPERAND (arg0, 0);
11027 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11028 build_int_cst (TREE_TYPE (tem), 0));
11031 /* Fold (X ^ Y) & Y as ~X & Y. */
11032 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11033 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11035 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11036 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11037 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11038 fold_convert_loc (loc, type, arg1));
11040 /* Fold (X ^ Y) & X as ~Y & X. */
11041 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11042 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11043 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11045 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11046 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11047 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11048 fold_convert_loc (loc, type, arg1));
11050 /* Fold X & (X ^ Y) as X & ~Y. */
11051 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11052 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11054 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11055 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11056 fold_convert_loc (loc, type, arg0),
11057 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11059 /* Fold X & (Y ^ X) as ~Y & X. */
11060 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11061 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11062 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11064 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11065 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11066 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11067 fold_convert_loc (loc, type, arg0));
11070 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11071 ((A & N) + B) & M -> (A + B) & M
11072 Similarly if (N & M) == 0,
11073 ((A | N) + B) & M -> (A + B) & M
11074 and for - instead of + (or unary - instead of +)
11075 and/or ^ instead of |.
11076 If B is constant and (B & M) == 0, fold into A & M. */
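/* E.g. with M == 0xff and N == 0xffff, ((A & 0xffff) + B) & 0xff can become
   (A + B) & 0xff: the bits of A cleared by the inner mask lie above M, and
   carries in the addition only propagate upward, so they cannot affect the
   low bits selected by M.  */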
11077 if (host_integerp (arg1, 1))
11079 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11080 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11081 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11082 && (TREE_CODE (arg0) == PLUS_EXPR
11083 || TREE_CODE (arg0) == MINUS_EXPR
11084 || TREE_CODE (arg0) == NEGATE_EXPR)
11085 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11086 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11090 unsigned HOST_WIDE_INT cst0;
11092 /* Now we know that arg0 is (C + D) or (C - D) or
11093 -C and arg1 (M) is equal to (1LL << cst) - 1.
11094 Store C into PMOP[0] and D into PMOP[1]. */
11095 pmop[0] = TREE_OPERAND (arg0, 0);
11097 if (TREE_CODE (arg0) != NEGATE_EXPR)
11099 pmop[1] = TREE_OPERAND (arg0, 1);
11103 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11104 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11108 for (; which >= 0; which--)
11109 switch (TREE_CODE (pmop[which]))
11114 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11117 /* tree_low_cst is not used, because we don't care about the upper bits. */
11119 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11121 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11126 else if (cst0 != 0)
11128 /* If C or D is of the form (A & N) where
11129 (N & M) == M, or of the form (A | N) or
11130 (A ^ N) where (N & M) == 0, replace it with A. */
11131 pmop[which] = TREE_OPERAND (pmop[which], 0);
11134 /* If C or D is a constant N where (N & M) == 0, it can be
11135 omitted (assumed 0). */
11136 if ((TREE_CODE (arg0) == PLUS_EXPR
11137 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11138 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11139 pmop[which] = NULL;
11145 /* Only build anything new if we optimized one or both arguments above. */
11147 if (pmop[0] != TREE_OPERAND (arg0, 0)
11148 || (TREE_CODE (arg0) != NEGATE_EXPR
11149 && pmop[1] != TREE_OPERAND (arg0, 1)))
11151 tree utype = TREE_TYPE (arg0);
11152 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11154 /* Perform the operations in a type that has defined
11155 overflow behavior. */
11156 utype = unsigned_type_for (TREE_TYPE (arg0));
11157 if (pmop[0] != NULL)
11158 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11159 if (pmop[1] != NULL)
11160 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11163 if (TREE_CODE (arg0) == NEGATE_EXPR)
11164 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11165 else if (TREE_CODE (arg0) == PLUS_EXPR)
11167 if (pmop[0] != NULL && pmop[1] != NULL)
11168 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11170 else if (pmop[0] != NULL)
11172 else if (pmop[1] != NULL)
11175 return build_int_cst (type, 0);
11177 else if (pmop[0] == NULL)
11178 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11180 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11182 /* TEM is now the new binary +, - or unary - replacement. */
11183 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11184 fold_convert_loc (loc, utype, arg1));
11185 return fold_convert_loc (loc, type, tem);
11190 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11191 if (t1 != NULL_TREE)
11193 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11194 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11195 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11198 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11200 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11201 && (~TREE_INT_CST_LOW (arg1)
11202 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11204 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11207 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11209 This results in more efficient code for machines without a NOR
11210 instruction. Combine will canonicalize to the first form
11211 which will allow use of NOR instructions provided by the
11212 backend if they exist. */
11213 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11214 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11216 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11217 build2 (BIT_IOR_EXPR, type,
11218 fold_convert_loc (loc, type,
11219 TREE_OPERAND (arg0, 0)),
11220 fold_convert_loc (loc, type,
11221 TREE_OPERAND (arg1, 0))));
11224 /* If arg0 is derived from the address of an object or function, we may
11225 be able to fold this expression using the object or function's alignment. */
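/* E.g. if arg0 is the address of an object known to be 8-byte aligned,
   arg0 & 7 can fold to the known constant residue (0 when arg0 points to
   the start of the object).  */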
11227 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11229 unsigned HOST_WIDE_INT modulus, residue;
11230 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11232 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11233 integer_onep (arg1));
11235 /* This works because modulus is a power of 2. If this weren't the
11236 case, we'd have to replace it by its greatest power-of-2
11237 divisor: modulus & -modulus. */
11239 return build_int_cst (type, residue & low);
11242 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11243 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11244 if the new mask might be further optimized. */
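/* E.g. for a 32-bit unsigned X, (X << 4) & 0xf0 can become
   (X << 4) & 0xff, because the low four bits of X << 4 are already zero
   and 0xff is a mode mask that may enable further folding.  */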
11245 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11246 || TREE_CODE (arg0) == RSHIFT_EXPR)
11247 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11248 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11249 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11250 < TYPE_PRECISION (TREE_TYPE (arg0))
11251 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11252 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11254 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11255 unsigned HOST_WIDE_INT mask
11256 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11257 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11258 tree shift_type = TREE_TYPE (arg0);
11260 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11261 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11262 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11263 && TYPE_PRECISION (TREE_TYPE (arg0))
11264 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11266 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11267 tree arg00 = TREE_OPERAND (arg0, 0);
11268 /* See if more bits can be proven as zero because of zero extension. */
11270 if (TREE_CODE (arg00) == NOP_EXPR
11271 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11273 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11274 if (TYPE_PRECISION (inner_type)
11275 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11276 && TYPE_PRECISION (inner_type) < prec)
11278 prec = TYPE_PRECISION (inner_type);
11279 /* See if we can shorten the right shift. */
11281 shift_type = inner_type;
11284 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11285 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11286 zerobits <<= prec - shiftc;
11287 /* For an arithmetic shift, if the sign bit could be set, zerobits
11288 may actually contain sign bits, so no transformation is
11289 possible unless MASK masks them all away. In that
11290 case the shift needs to be converted into a logical shift. */
11291 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11292 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11294 if ((mask & zerobits) == 0)
11295 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11301 /* ((X << 16) & 0xff00) is (X, 0). */
11302 if ((mask & zerobits) == mask)
11303 return omit_one_operand_loc (loc, type,
11304 build_int_cst (type, 0), arg0);
11306 newmask = mask | zerobits;
11307 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11311 /* Only do the transformation if NEWMASK is some integer mode's mask. */
11313 for (prec = BITS_PER_UNIT;
11314 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11315 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11317 if (prec < HOST_BITS_PER_WIDE_INT
11318 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11322 if (shift_type != TREE_TYPE (arg0))
11324 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11325 fold_convert_loc (loc, shift_type,
11326 TREE_OPERAND (arg0, 0)),
11327 TREE_OPERAND (arg0, 1));
11328 tem = fold_convert_loc (loc, type, tem);
11332 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11333 if (!tree_int_cst_equal (newmaskt, arg1))
11334 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11342 /* Don't touch a floating-point divide by zero unless the mode
11343 of the constant can represent infinity. */
11344 if (TREE_CODE (arg1) == REAL_CST
11345 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11346 && real_zerop (arg1))
11349 /* Optimize A / A to 1.0 if we don't care about
11350 NaNs or Infinities. Skip the transformation
11351 for non-real operands. */
11352 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11353 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11354 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11355 && operand_equal_p (arg0, arg1, 0))
11357 tree r = build_real (TREE_TYPE (arg0), dconst1);
11359 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11362 /* The complex version of the above A / A optimization. */
11363 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11364 && operand_equal_p (arg0, arg1, 0))
11366 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11367 if (! HONOR_NANS (TYPE_MODE (elem_type))
11368 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11370 tree r = build_real (elem_type, dconst1);
11371 /* omit_two_operands will call fold_convert for us. */
11372 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11376 /* (-A) / (-B) -> A / B */
11377 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11378 return fold_build2_loc (loc, RDIV_EXPR, type,
11379 TREE_OPERAND (arg0, 0),
11380 negate_expr (arg1));
11381 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11382 return fold_build2_loc (loc, RDIV_EXPR, type,
11383 negate_expr (arg0),
11384 TREE_OPERAND (arg1, 0));
11386 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11387 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11388 && real_onep (arg1))
11389 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11391 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11392 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11393 && real_minus_onep (arg1))
11394 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11395 negate_expr (arg0)));
11397 /* If ARG1 is a constant, we can convert this to a multiply by the
11398 reciprocal. This does not have the same rounding properties,
11399 so only do this if -freciprocal-math. We can actually
11400 always safely do it if ARG1 is a power of two, but it's hard to
11401 tell if it is or not in a portable manner. */
11402 if (TREE_CODE (arg1) == REAL_CST)
11404 if (flag_reciprocal_math
11405 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11407 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11408 /* Find the reciprocal if optimizing and the result is exact. */
11412 r = TREE_REAL_CST (arg1);
11413 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11415 tem = build_real (type, r);
11416 return fold_build2_loc (loc, MULT_EXPR, type,
11417 fold_convert_loc (loc, type, arg0), tem);
11421 /* Convert A/B/C to A/(B*C). */
11422 if (flag_reciprocal_math
11423 && TREE_CODE (arg0) == RDIV_EXPR)
11424 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11425 fold_build2_loc (loc, MULT_EXPR, type,
11426 TREE_OPERAND (arg0, 1), arg1));
11428 /* Convert A/(B/C) to (A/B)*C. */
11429 if (flag_reciprocal_math
11430 && TREE_CODE (arg1) == RDIV_EXPR)
11431 return fold_build2_loc (loc, MULT_EXPR, type,
11432 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11433 TREE_OPERAND (arg1, 0)),
11434 TREE_OPERAND (arg1, 1));
11436 /* Convert C1/(X*C2) into (C1/C2)/X. */
11437 if (flag_reciprocal_math
11438 && TREE_CODE (arg1) == MULT_EXPR
11439 && TREE_CODE (arg0) == REAL_CST
11440 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11442 tree tem = const_binop (RDIV_EXPR, arg0,
11443 TREE_OPERAND (arg1, 1));
11445 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11446 TREE_OPERAND (arg1, 0));
11449 if (flag_unsafe_math_optimizations)
11451 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11452 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11454 /* Optimize sin(x)/cos(x) as tan(x). */
11455 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11456 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11457 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11458 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11459 CALL_EXPR_ARG (arg1, 0), 0))
11461 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11463 if (tanfn != NULL_TREE)
11464 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11467 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11468 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11469 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11470 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11471 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11472 CALL_EXPR_ARG (arg1, 0), 0))
11474 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11476 if (tanfn != NULL_TREE)
11478 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11479 CALL_EXPR_ARG (arg0, 0));
11480 return fold_build2_loc (loc, RDIV_EXPR, type,
11481 build_real (type, dconst1), tmp);
11485 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11486 NaNs or Infinities. */
11487 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11488 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11489 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11491 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11492 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11494 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11495 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11496 && operand_equal_p (arg00, arg01, 0))
11498 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11500 if (cosfn != NULL_TREE)
11501 return build_call_expr_loc (loc, cosfn, 1, arg00);
11505 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11506 NaNs or Infinities. */
11507 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11508 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11509 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11511 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11512 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11514 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11515 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11516 && operand_equal_p (arg00, arg01, 0))
11518 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11520 if (cosfn != NULL_TREE)
11522 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11523 return fold_build2_loc (loc, RDIV_EXPR, type,
11524 build_real (type, dconst1),
11530 /* Optimize pow(x,c)/x as pow(x,c-1). */
11531 if (fcode0 == BUILT_IN_POW
11532 || fcode0 == BUILT_IN_POWF
11533 || fcode0 == BUILT_IN_POWL)
11535 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11536 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11537 if (TREE_CODE (arg01) == REAL_CST
11538 && !TREE_OVERFLOW (arg01)
11539 && operand_equal_p (arg1, arg00, 0))
11541 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11545 c = TREE_REAL_CST (arg01);
11546 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11547 arg = build_real (type, c);
11548 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11552 /* Optimize a/root(b/c) into a*root(c/b). */
11553 if (BUILTIN_ROOT_P (fcode1))
11555 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11557 if (TREE_CODE (rootarg) == RDIV_EXPR)
11559 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11560 tree b = TREE_OPERAND (rootarg, 0);
11561 tree c = TREE_OPERAND (rootarg, 1);
11563 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11565 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11566 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11570 /* Optimize x/expN(y) into x*expN(-y). */
11571 if (BUILTIN_EXPONENT_P (fcode1))
11573 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11574 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11575 arg1 = build_call_expr_loc (loc,
11577 fold_convert_loc (loc, type, arg));
11578 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11581 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11582 if (fcode1 == BUILT_IN_POW
11583 || fcode1 == BUILT_IN_POWF
11584 || fcode1 == BUILT_IN_POWL)
11586 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11587 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11588 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11589 tree neg11 = fold_convert_loc (loc, type,
11590 negate_expr (arg11));
11591 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11592 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11597 case TRUNC_DIV_EXPR:
11598 /* Optimize (X & (-A)) / A where A is a power of 2, to X >> log2(A). */
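/* E.g. with A == 8, (X & -8) / 8 folds to X >> 3: the mask rounds X down
   to a multiple of 8, which is what an arithmetic right shift by 3 also
   computes.  */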
11600 if (TREE_CODE (arg0) == BIT_AND_EXPR
11601 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11602 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11604 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11605 arg1, TREE_OPERAND (arg0, 1));
11606 if (sum && integer_zerop (sum)) {
11607 unsigned long pow2;
11609 if (TREE_INT_CST_LOW (arg1))
11610 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11612 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11613 + HOST_BITS_PER_WIDE_INT;
11615 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11616 TREE_OPERAND (arg0, 0),
11617 build_int_cst (integer_type_node, pow2));
11623 case FLOOR_DIV_EXPR:
11624 /* Simplify A / (B << N) where A and B are positive and B is
11625 a power of 2, to A >> (N + log2(B)). */
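/* E.g. for unsigned A, A / (4 << N) becomes A >> (N + 2).  */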
11626 strict_overflow_p = false;
11627 if (TREE_CODE (arg1) == LSHIFT_EXPR
11628 && (TYPE_UNSIGNED (type)
11629 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11631 tree sval = TREE_OPERAND (arg1, 0);
11632 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11634 tree sh_cnt = TREE_OPERAND (arg1, 1);
11635 unsigned long pow2;
11637 if (TREE_INT_CST_LOW (sval))
11638 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11640 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11641 + HOST_BITS_PER_WIDE_INT;
11643 if (strict_overflow_p)
11644 fold_overflow_warning (("assuming signed overflow does not "
11645 "occur when simplifying A / (B << N)"),
11646 WARN_STRICT_OVERFLOW_MISC);
11648 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11650 build_int_cst (TREE_TYPE (sh_cnt),
11652 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11653 fold_convert_loc (loc, type, arg0), sh_cnt);
11657 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11658 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11659 if (INTEGRAL_TYPE_P (type)
11660 && TYPE_UNSIGNED (type)
11661 && code == FLOOR_DIV_EXPR)
11662 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11666 case ROUND_DIV_EXPR:
11667 case CEIL_DIV_EXPR:
11668 case EXACT_DIV_EXPR:
11669 if (integer_onep (arg1))
11670 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11671 if (integer_zerop (arg1))
11673 /* X / -1 is -X. */
11674 if (!TYPE_UNSIGNED (type)
11675 && TREE_CODE (arg1) == INTEGER_CST
11676 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11677 && TREE_INT_CST_HIGH (arg1) == -1)
11678 return fold_convert_loc (loc, type, negate_expr (arg0));
11680 /* Convert -A / -B to A / B when the type is signed and overflow is undefined. */
11682 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11683 && TREE_CODE (arg0) == NEGATE_EXPR
11684 && negate_expr_p (arg1))
11686 if (INTEGRAL_TYPE_P (type))
11687 fold_overflow_warning (("assuming signed overflow does not occur "
11688 "when distributing negation across "
11690 WARN_STRICT_OVERFLOW_MISC);
11691 return fold_build2_loc (loc, code, type,
11692 fold_convert_loc (loc, type,
11693 TREE_OPERAND (arg0, 0)),
11694 fold_convert_loc (loc, type,
11695 negate_expr (arg1)));
11697 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11698 && TREE_CODE (arg1) == NEGATE_EXPR
11699 && negate_expr_p (arg0))
11701 if (INTEGRAL_TYPE_P (type))
11702 fold_overflow_warning (("assuming signed overflow does not occur "
11703 "when distributing negation across "
11705 WARN_STRICT_OVERFLOW_MISC);
11706 return fold_build2_loc (loc, code, type,
11707 fold_convert_loc (loc, type,
11708 negate_expr (arg0)),
11709 fold_convert_loc (loc, type,
11710 TREE_OPERAND (arg1, 0)));
11713 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11714 operation, EXACT_DIV_EXPR.
11716 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11717 At one time others generated faster code; it's not clear whether they do
11718 after the last round of changes to the DIV code in expmed.c.  */
11719 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11720 && multiple_of_p (type, arg0, arg1))
11721 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
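/* For example, a CEIL_DIV_EXPR of x * 12 by 4 becomes an EXACT_DIV_EXPR,
because x * 12 is known to be a multiple of 4.  */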
11723 strict_overflow_p = false;
11724 if (TREE_CODE (arg1) == INTEGER_CST
11725 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11726 &strict_overflow_p)))
11728 if (strict_overflow_p)
11729 fold_overflow_warning (("assuming signed overflow does not occur "
11730 "when simplifying division"),
11731 WARN_STRICT_OVERFLOW_MISC);
11732 return fold_convert_loc (loc, type, tem);
11737 case CEIL_MOD_EXPR:
11738 case FLOOR_MOD_EXPR:
11739 case ROUND_MOD_EXPR:
11740 case TRUNC_MOD_EXPR:
11741 /* X % 1 is always zero, but be sure to preserve any side effects in X.  */
11743 if (integer_onep (arg1))
11744 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11746 /* X % 0, return X % 0 unchanged so that we can get the
11747 proper warnings and errors. */
11748 if (integer_zerop (arg1))
return NULL_TREE;
11751 /* 0 % X is always zero, but be sure to preserve any side
11752 effects in X. Place this after checking for X == 0. */
11753 if (integer_zerop (arg0))
11754 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11756 /* X % -1 is zero. */
11757 if (!TYPE_UNSIGNED (type)
11758 && TREE_CODE (arg1) == INTEGER_CST
11759 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11760 && TREE_INT_CST_HIGH (arg1) == -1)
11761 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11763 /* X % -C is the same as X % C. */
11764 if (code == TRUNC_MOD_EXPR
11765 && !TYPE_UNSIGNED (type)
11766 && TREE_CODE (arg1) == INTEGER_CST
11767 && !TREE_OVERFLOW (arg1)
11768 && TREE_INT_CST_HIGH (arg1) < 0
11769 && !TYPE_OVERFLOW_TRAPS (type)
11770 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11771 && !sign_bit_p (arg1, arg1))
11772 return fold_build2_loc (loc, code, type,
11773 fold_convert_loc (loc, type, arg0),
11774 fold_convert_loc (loc, type,
11775 negate_expr (arg1)));
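/* For example, for signed x, x % -16 folds to x % 16; the sign_bit_p
check excludes C == INT_MIN, whose negation would overflow.  */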
11777 /* X % -Y is the same as X % Y. */
11778 if (code == TRUNC_MOD_EXPR
11779 && !TYPE_UNSIGNED (type)
11780 && TREE_CODE (arg1) == NEGATE_EXPR
11781 && !TYPE_OVERFLOW_TRAPS (type))
11782 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11783 fold_convert_loc (loc, type,
11784 TREE_OPERAND (arg1, 0)));
11786 strict_overflow_p = false;
11787 if (TREE_CODE (arg1) == INTEGER_CST
11788 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11789 &strict_overflow_p)))
11791 if (strict_overflow_p)
11792 fold_overflow_warning (("assuming signed overflow does not occur "
11793 "when simplifying modulus"),
11794 WARN_STRICT_OVERFLOW_MISC);
11795 return fold_convert_loc (loc, type, tem);
11798 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11799 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11800 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11801 && (TYPE_UNSIGNED (type)
11802 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11805 /* Also optimize A % (C << N) where C is a power of 2,
11806 to A & ((C << N) - 1). */
11807 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11808 c = TREE_OPERAND (arg1, 0);
11810 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11813 tree mask = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11814 build_int_cst (TREE_TYPE (arg1), 1));
11815 if (strict_overflow_p)
11816 fold_overflow_warning (("assuming signed overflow does not "
11817 "occur when simplifying "
11818 "X % (power of two)"),
11819 WARN_STRICT_OVERFLOW_MISC);
11820 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11821 fold_convert_loc (loc, type, arg0),
11822 fold_convert_loc (loc, type, mask));
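/* For example, for unsigned (or known-nonnegative) x, x % 16 folds to
x & 15, and a % (8 << n) folds to a & ((8 << n) - 1).  */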
11830 if (integer_all_onesp (arg0))
11831 return omit_one_operand_loc (loc, type, arg0, arg1);
11835 /* Optimize -1 >> x for arithmetic right shifts. */
11836 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11837 && tree_expr_nonnegative_p (arg1))
11838 return omit_one_operand_loc (loc, type, arg0, arg1);
11839 /* ... fall through ... */
11843 if (integer_zerop (arg1))
11844 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11845 if (integer_zerop (arg0))
11846 return omit_one_operand_loc (loc, type, arg0, arg1);
11848 /* Since a negative shift count is not well-defined,
11849 don't try to compute it in the compiler. */
11850 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
return NULL_TREE;
11853 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11854 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11855 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11856 && host_integerp (TREE_OPERAND (arg0, 1), false)
11857 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11859 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11860 + TREE_INT_CST_LOW (arg1));
11862 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11863 being well defined. */
11864 if (low >= TYPE_PRECISION (type))
11866 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11867 low = low % TYPE_PRECISION (type);
11868 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11869 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11870 TREE_OPERAND (arg0, 0));
11872 low = TYPE_PRECISION (type) - 1;
11875 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11876 build_int_cst (type, low));
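/* For example, (x << 3) << 5 folds to x << 8.  If the combined count
reaches the precision, rotate counts are reduced modulo the precision,
while unsigned or left shifts fold to zero.  */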
11879 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11880 into x & ((unsigned)-1 >> c) for unsigned types. */
11881 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11882 || (TYPE_UNSIGNED (type)
11883 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11884 && host_integerp (arg1, false)
11885 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11886 && host_integerp (TREE_OPERAND (arg0, 1), false)
11887 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11889 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11890 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11896 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11898 lshift = build_int_cst (type, -1);
11899 lshift = int_const_binop (code, lshift, arg1);
11901 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
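/* For example, when the two shift counts are equal, (x >> 4) << 4 folds
to x & (-1 << 4), and for unsigned x, (x << 4) >> 4 folds to
x & ((unsigned) -1 >> 4).  */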
11905 /* Rewrite an LROTATE_EXPR by a constant into an
11906 RROTATE_EXPR by a new constant. */
11907 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11909 tree tem = build_int_cst (TREE_TYPE (arg1),
11910 TYPE_PRECISION (type));
11911 tem = const_binop (MINUS_EXPR, tem, arg1);
11912 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
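/* For example, on a 32-bit type, a left rotate by 5 is rewritten as a
right rotate by 27.  */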
11915 /* If we have a rotate of a bit operation with the rotate count and
11916 the second operand of the bit operation both constant,
11917 permute the two operations. */
11918 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11919 && (TREE_CODE (arg0) == BIT_AND_EXPR
11920 || TREE_CODE (arg0) == BIT_IOR_EXPR
11921 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11922 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11923 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11924 fold_build2_loc (loc, code, type,
11925 TREE_OPERAND (arg0, 0), arg1),
11926 fold_build2_loc (loc, code, type,
11927 TREE_OPERAND (arg0, 1), arg1));
11929 /* Two consecutive rotates adding up to the precision of the
11930 type can be ignored. */
11931 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11932 && TREE_CODE (arg0) == RROTATE_EXPR
11933 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11934 && TREE_INT_CST_HIGH (arg1) == 0
11935 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11936 && ((TREE_INT_CST_LOW (arg1)
11937 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11938 == (unsigned int) TYPE_PRECISION (type)))
11939 return TREE_OPERAND (arg0, 0);
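/* For example, on a 32-bit type, a right rotate by 12 of (x right-rotated
by 20) is just x, since the rotate counts sum to the precision.  */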
11941 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11942 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11943 if the latter can be further optimized. */
11944 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11945 && TREE_CODE (arg0) == BIT_AND_EXPR
11946 && TREE_CODE (arg1) == INTEGER_CST
11947 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11949 tree mask = fold_build2_loc (loc, code, type,
11950 fold_convert_loc (loc, type,
11951 TREE_OPERAND (arg0, 1)),
11953 tree shift = fold_build2_loc (loc, code, type,
11954 fold_convert_loc (loc, type,
11955 TREE_OPERAND (arg0, 0)),
11957 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11965 if (operand_equal_p (arg0, arg1, 0))
11966 return omit_one_operand_loc (loc, type, arg0, arg1);
11967 if (INTEGRAL_TYPE_P (type)
11968 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11969 return omit_one_operand_loc (loc, type, arg1, arg0);
11970 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11976 if (operand_equal_p (arg0, arg1, 0))
11977 return omit_one_operand_loc (loc, type, arg0, arg1);
11978 if (INTEGRAL_TYPE_P (type)
11979 && TYPE_MAX_VALUE (type)
11980 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11981 return omit_one_operand_loc (loc, type, arg1, arg0);
11982 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11987 case TRUTH_ANDIF_EXPR:
11988 /* Note that the operands of this must be ints
11989 and their values must be 0 or 1.
11990 ("true" is a fixed value perhaps depending on the language.) */
11991 /* If first arg is constant zero, return it. */
11992 if (integer_zerop (arg0))
11993 return fold_convert_loc (loc, type, arg0);
11994 case TRUTH_AND_EXPR:
11995 /* If either arg is constant true, drop it. */
11996 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11997 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11998 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11999 /* Preserve sequence points. */
12000 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12001 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12002 /* If second arg is constant zero, result is zero, but first arg
12003 must be evaluated. */
12004 if (integer_zerop (arg1))
12005 return omit_one_operand_loc (loc, type, arg1, arg0);
12006 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12007 case will be handled here. */
12008 if (integer_zerop (arg0))
12009 return omit_one_operand_loc (loc, type, arg0, arg1);
12011 /* !X && X is always false. */
12012 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12013 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12014 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12015 /* X && !X is always false. */
12016 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12017 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12018 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12020 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12021 means A >= Y && A != MAX, but in this case we know that A < X <= MAX.  */
12024 if (!TREE_SIDE_EFFECTS (arg0)
12025 && !TREE_SIDE_EFFECTS (arg1))
12027 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12028 if (tem && !operand_equal_p (tem, arg0, 0))
12029 return fold_build2_loc (loc, code, type, tem, arg1);
12031 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12032 if (tem && !operand_equal_p (tem, arg1, 0))
12033 return fold_build2_loc (loc, code, type, arg0, tem);
12036 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12042 case TRUTH_ORIF_EXPR:
12043 /* Note that the operands of this must be ints
12044 and their values must be 0 or true.
12045 ("true" is a fixed value perhaps depending on the language.) */
12046 /* If first arg is constant true, return it. */
12047 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12048 return fold_convert_loc (loc, type, arg0);
12049 case TRUTH_OR_EXPR:
12050 /* If either arg is constant zero, drop it. */
12051 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12052 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12053 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12054 /* Preserve sequence points. */
12055 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12056 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12057 /* If second arg is constant true, result is true, but we must
12058 evaluate first arg. */
12059 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12060 return omit_one_operand_loc (loc, type, arg1, arg0);
12061 /* Likewise for first arg, but note this only occurs here for TRUTH_OR_EXPR.  */
12063 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12064 return omit_one_operand_loc (loc, type, arg0, arg1);
12066 /* !X || X is always true. */
12067 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12068 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12069 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12070 /* X || !X is always true. */
12071 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12072 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12073 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12075 /* (X && !Y) || (!X && Y) is X ^ Y */
12076 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12077 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12079 tree a0, a1, l0, l1, n0, n1;
12081 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12082 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12084 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12085 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12087 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12088 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12090 if ((operand_equal_p (n0, a0, 0)
12091 && operand_equal_p (n1, a1, 0))
12092 || (operand_equal_p (n0, a1, 0)
12093 && operand_equal_p (n1, a0, 0)))
12094 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12097 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12103 case TRUTH_XOR_EXPR:
12104 /* If the second arg is constant zero, drop it. */
12105 if (integer_zerop (arg1))
12106 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12107 /* If the second arg is constant true, this is a logical inversion. */
12108 if (integer_onep (arg1))
12110 /* Only call invert_truthvalue if operand is a truth value. */
12111 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12112 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12114 tem = invert_truthvalue_loc (loc, arg0);
12115 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12117 /* Identical arguments cancel to zero. */
12118 if (operand_equal_p (arg0, arg1, 0))
12119 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12121 /* !X ^ X is always true. */
12122 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12123 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12124 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12126 /* X ^ !X is always true. */
12127 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12128 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12129 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12138 tem = fold_comparison (loc, code, type, op0, op1);
12139 if (tem != NULL_TREE)
12142 /* bool_var != 0 becomes bool_var. */
12143 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12144 && code == NE_EXPR)
12145 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12147 /* bool_var == 1 becomes bool_var. */
12148 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12149 && code == EQ_EXPR)
12150 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12152 /* bool_var != 1 becomes !bool_var. */
12153 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12154 && code == NE_EXPR)
12155 return fold_convert_loc (loc, type,
12156 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12157 TREE_TYPE (arg0), arg0));
12159 /* bool_var == 0 becomes !bool_var. */
12160 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12161 && code == EQ_EXPR)
12162 return fold_convert_loc (loc, type,
12163 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12164 TREE_TYPE (arg0), arg0));
12166 /* !exp != 0 becomes !exp */
12167 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12168 && code == NE_EXPR)
12169 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12171 /* If this is an equality comparison of the address of two non-weak,
12172 unaliased symbols neither of which are extern (since we do not
12173 have access to attributes for externs), then we know the result. */
12174 if (TREE_CODE (arg0) == ADDR_EXPR
12175 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12176 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12177 && ! lookup_attribute ("alias",
12178 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12179 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12180 && TREE_CODE (arg1) == ADDR_EXPR
12181 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12182 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12183 && ! lookup_attribute ("alias",
12184 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12185 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12187 /* We know that we're looking at the address of two
12188 non-weak, unaliased, static _DECL nodes.
12190 It is both wasteful and incorrect to call operand_equal_p
12191 to compare the two ADDR_EXPR nodes. It is wasteful in that
12192 all we need to do is test pointer equality for the arguments
12193 to the two ADDR_EXPR nodes. It is incorrect to use
12194 operand_equal_p as that function is NOT equivalent to a
12195 C equality test. It can in fact return false for two
12196 objects which would test as equal using the C equality operator.  */
12198 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12199 return constant_boolean_node (equal
12200 ? code == EQ_EXPR : code != EQ_EXPR,
12204 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12205 a MINUS_EXPR of a constant, we can convert it into a comparison with
12206 a revised constant as long as no overflow occurs. */
12207 if (TREE_CODE (arg1) == INTEGER_CST
12208 && (TREE_CODE (arg0) == PLUS_EXPR
12209 || TREE_CODE (arg0) == MINUS_EXPR)
12210 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12211 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12212 ? MINUS_EXPR : PLUS_EXPR,
12213 fold_convert_loc (loc, TREE_TYPE (arg0),
12215 TREE_OPERAND (arg0, 1)))
12216 && !TREE_OVERFLOW (tem))
12217 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12219 /* Similarly for a NEGATE_EXPR. */
12220 if (TREE_CODE (arg0) == NEGATE_EXPR
12221 && TREE_CODE (arg1) == INTEGER_CST
12222 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12224 && TREE_CODE (tem) == INTEGER_CST
12225 && !TREE_OVERFLOW (tem))
12226 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12228 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12229 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12230 && TREE_CODE (arg1) == INTEGER_CST
12231 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12232 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12233 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12234 fold_convert_loc (loc,
12237 TREE_OPERAND (arg0, 1)));
12239 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12240 if ((TREE_CODE (arg0) == PLUS_EXPR
12241 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12242 || TREE_CODE (arg0) == MINUS_EXPR)
12243 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12246 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12247 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12249 tree val = TREE_OPERAND (arg0, 1);
12250 return omit_two_operands_loc (loc, type,
12251 fold_build2_loc (loc, code, type,
12253 build_int_cst (TREE_TYPE (val),
12255 TREE_OPERAND (arg0, 0), arg1);
12258 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12259 if (TREE_CODE (arg0) == MINUS_EXPR
12260 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12261 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12264 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12266 return omit_two_operands_loc (loc, type,
12268 ? boolean_true_node : boolean_false_node,
12269 TREE_OPERAND (arg0, 1), arg1);
12272 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12273 for !=. Don't do this for ordered comparisons due to overflow. */
12274 if (TREE_CODE (arg0) == MINUS_EXPR
12275 && integer_zerop (arg1))
12276 return fold_build2_loc (loc, code, type,
12277 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12279 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12280 if (TREE_CODE (arg0) == ABS_EXPR
12281 && (integer_zerop (arg1) || real_zerop (arg1)))
12282 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12284 /* If this is an EQ or NE comparison with zero and ARG0 is
12285 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12286 two operations, but the latter can be done in one less insn
12287 on machines that have only two-operand insns or on which a
12288 constant cannot be the first operand. */
12289 if (TREE_CODE (arg0) == BIT_AND_EXPR
12290 && integer_zerop (arg1))
12292 tree arg00 = TREE_OPERAND (arg0, 0);
12293 tree arg01 = TREE_OPERAND (arg0, 1);
12294 if (TREE_CODE (arg00) == LSHIFT_EXPR
12295 && integer_onep (TREE_OPERAND (arg00, 0)))
12297 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12298 arg01, TREE_OPERAND (arg00, 1));
12299 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12300 build_int_cst (TREE_TYPE (arg0), 1));
12301 return fold_build2_loc (loc, code, type,
12302 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12305 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12306 && integer_onep (TREE_OPERAND (arg01, 0)))
12308 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12309 arg00, TREE_OPERAND (arg01, 1));
12310 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12311 build_int_cst (TREE_TYPE (arg0), 1));
12312 return fold_build2_loc (loc, code, type,
12313 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12318 /* If this is an NE or EQ comparison of zero against the result of a
12319 signed MOD operation whose second operand is a power of 2, make
12320 the MOD operation unsigned since it is simpler and equivalent. */
12321 if (integer_zerop (arg1)
12322 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12323 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12324 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12325 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12326 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12327 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12329 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12330 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12331 fold_convert_loc (loc, newtype,
12332 TREE_OPERAND (arg0, 0)),
12333 fold_convert_loc (loc, newtype,
12334 TREE_OPERAND (arg0, 1)));
12336 return fold_build2_loc (loc, code, type, newmod,
12337 fold_convert_loc (loc, newtype, arg1));
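/* For example, for signed int x, (x % 4) == 0 folds to
((unsigned int) x % 4U) == 0; the two are equivalent when the result is
only compared against zero.  */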
12340 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12341 C1 is a valid shift constant, and C2 is a power of two, i.e. a single bit.  */
12343 if (TREE_CODE (arg0) == BIT_AND_EXPR
12344 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12345 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12347 && integer_pow2p (TREE_OPERAND (arg0, 1))
12348 && integer_zerop (arg1))
12350 tree itype = TREE_TYPE (arg0);
12351 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12352 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12354 /* Check for a valid shift count. */
12355 if (TREE_INT_CST_HIGH (arg001) == 0
12356 && TREE_INT_CST_LOW (arg001) < prec)
12358 tree arg01 = TREE_OPERAND (arg0, 1);
12359 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12360 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12361 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12362 can be rewritten as (X & (C2 << C1)) != 0. */
12363 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12365 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12366 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12367 return fold_build2_loc (loc, code, type, tem,
12368 fold_convert_loc (loc, itype, arg1));
12370 /* Otherwise, for signed (arithmetic) shifts,
12371 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12372 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12373 else if (!TYPE_UNSIGNED (itype))
12374 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12375 arg000, build_int_cst (itype, 0));
12376 /* Otherwise, for unsigned (logical) shifts,
12377 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12378 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12380 return omit_one_operand_loc (loc, type,
12381 code == EQ_EXPR ? integer_one_node
12382 : integer_zero_node,
12387 /* If we have (A & C) == C where C is a power of 2, convert this into
12388 (A & C) != 0. Similarly for NE_EXPR. */
12389 if (TREE_CODE (arg0) == BIT_AND_EXPR
12390 && integer_pow2p (TREE_OPERAND (arg0, 1))
12391 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12392 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12393 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12394 integer_zero_node));
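/* For example, (a & 8) == 8 folds to (a & 8) != 0, and
(a & 8) != 8 folds to (a & 8) == 0.  */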
12396 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12397 bit, then fold the expression into A < 0 or A >= 0. */
12398 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12402 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12403 Similarly for NE_EXPR. */
12404 if (TREE_CODE (arg0) == BIT_AND_EXPR
12405 && TREE_CODE (arg1) == INTEGER_CST
12406 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12408 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12409 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12410 TREE_OPERAND (arg0, 1));
12412 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12413 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12415 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12416 if (integer_nonzerop (dandnotc))
12417 return omit_one_operand_loc (loc, type, rslt, arg0);
12420 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12421 Similarly for NE_EXPR. */
12422 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12423 && TREE_CODE (arg1) == INTEGER_CST
12424 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12426 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12428 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12429 TREE_OPERAND (arg0, 1),
12430 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12431 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12432 if (integer_nonzerop (candnotd))
12433 return omit_one_operand_loc (loc, type, rslt, arg0);
12436 /* If this is a comparison of a field, we may be able to simplify it. */
12437 if ((TREE_CODE (arg0) == COMPONENT_REF
12438 || TREE_CODE (arg0) == BIT_FIELD_REF)
12439 /* Handle the constant case even without -O
12440 to make sure the warnings are given. */
12441 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12443 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12448 /* Optimize comparisons of strlen vs zero to a compare of the
12449 first character of the string vs zero. To wit,
12450 strlen(ptr) == 0 => *ptr == 0
12451 strlen(ptr) != 0 => *ptr != 0
12452 Other cases should reduce to one of these two (or a constant)
12453 due to the return value of strlen being unsigned. */
12454 if (TREE_CODE (arg0) == CALL_EXPR
12455 && integer_zerop (arg1))
12457 tree fndecl = get_callee_fndecl (arg0);
12460 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12461 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12462 && call_expr_nargs (arg0) == 1
12463 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12465 tree iref = build_fold_indirect_ref_loc (loc,
12466 CALL_EXPR_ARG (arg0, 0));
12467 return fold_build2_loc (loc, code, type, iref,
12468 build_int_cst (TREE_TYPE (iref), 0));
12472 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12473 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12474 if (TREE_CODE (arg0) == RSHIFT_EXPR
12475 && integer_zerop (arg1)
12476 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12478 tree arg00 = TREE_OPERAND (arg0, 0);
12479 tree arg01 = TREE_OPERAND (arg0, 1);
12480 tree itype = TREE_TYPE (arg00);
12481 if (TREE_INT_CST_HIGH (arg01) == 0
12482 && TREE_INT_CST_LOW (arg01)
12483 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12485 if (TYPE_UNSIGNED (itype))
12487 itype = signed_type_for (itype);
12488 arg00 = fold_convert_loc (loc, itype, arg00);
12490 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12491 type, arg00, build_int_cst (itype, 0));
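/* For example, with a 32-bit x, (x >> 31) != 0 folds to x < 0 and
(x >> 31) == 0 folds to x >= 0, converting x to the corresponding signed
type first if it is unsigned.  */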
12495 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12496 if (integer_zerop (arg1)
12497 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12498 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12499 TREE_OPERAND (arg0, 1));
12501 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12502 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12503 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12504 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12505 build_int_cst (TREE_TYPE (arg0), 0));
12506 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12507 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12508 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12509 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12510 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12511 build_int_cst (TREE_TYPE (arg0), 0));
12513 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12514 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12515 && TREE_CODE (arg1) == INTEGER_CST
12516 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12517 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12518 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12519 TREE_OPERAND (arg0, 1), arg1));
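/* For example, (x ^ 3) == 5 folds to x == 6, since 3 ^ 5 == 6.  */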
12521 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12522 (X & C) == 0 when C is a single bit. */
12523 if (TREE_CODE (arg0) == BIT_AND_EXPR
12524 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12525 && integer_zerop (arg1)
12526 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12528 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12529 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12530 TREE_OPERAND (arg0, 1));
12531 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12533 fold_convert_loc (loc, TREE_TYPE (arg0),
12537 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12538 constant C is a power of two, i.e. a single bit. */
12539 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12540 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12541 && integer_zerop (arg1)
12542 && integer_pow2p (TREE_OPERAND (arg0, 1))
12543 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12544 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12546 tree arg00 = TREE_OPERAND (arg0, 0);
12547 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12548 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12551 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12552 when C is a power of two, i.e. a single bit.  */
12553 if (TREE_CODE (arg0) == BIT_AND_EXPR
12554 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12555 && integer_zerop (arg1)
12556 && integer_pow2p (TREE_OPERAND (arg0, 1))
12557 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12558 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12560 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12561 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12562 arg000, TREE_OPERAND (arg0, 1));
12563 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12564 tem, build_int_cst (TREE_TYPE (tem), 0));
12567 if (integer_zerop (arg1)
12568 && tree_expr_nonzero_p (arg0))
12570 tree res = constant_boolean_node (code == NE_EXPR, type);
12571 return omit_one_operand_loc (loc, type, res, arg0);
12574 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12575 if (TREE_CODE (arg0) == NEGATE_EXPR
12576 && TREE_CODE (arg1) == NEGATE_EXPR)
12577 return fold_build2_loc (loc, code, type,
12578 TREE_OPERAND (arg0, 0),
12579 fold_convert_loc (loc, TREE_TYPE (arg0),
12580 TREE_OPERAND (arg1, 0)));
12582 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12583 if (TREE_CODE (arg0) == BIT_AND_EXPR
12584 && TREE_CODE (arg1) == BIT_AND_EXPR)
12586 tree arg00 = TREE_OPERAND (arg0, 0);
12587 tree arg01 = TREE_OPERAND (arg0, 1);
12588 tree arg10 = TREE_OPERAND (arg1, 0);
12589 tree arg11 = TREE_OPERAND (arg1, 1);
12590 tree itype = TREE_TYPE (arg0);
12592 if (operand_equal_p (arg01, arg11, 0))
12593 return fold_build2_loc (loc, code, type,
12594 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12595 fold_build2_loc (loc,
12596 BIT_XOR_EXPR, itype,
12599 build_int_cst (itype, 0));
12601 if (operand_equal_p (arg01, arg10, 0))
12602 return fold_build2_loc (loc, code, type,
12603 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12604 fold_build2_loc (loc,
12605 BIT_XOR_EXPR, itype,
12608 build_int_cst (itype, 0));
12610 if (operand_equal_p (arg00, arg11, 0))
12611 return fold_build2_loc (loc, code, type,
12612 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12613 fold_build2_loc (loc,
12614 BIT_XOR_EXPR, itype,
12617 build_int_cst (itype, 0));
12619 if (operand_equal_p (arg00, arg10, 0))
12620 return fold_build2_loc (loc, code, type,
12621 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12622 fold_build2_loc (loc,
12623 BIT_XOR_EXPR, itype,
12626 build_int_cst (itype, 0));
12629 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12630 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12632 tree arg00 = TREE_OPERAND (arg0, 0);
12633 tree arg01 = TREE_OPERAND (arg0, 1);
12634 tree arg10 = TREE_OPERAND (arg1, 0);
12635 tree arg11 = TREE_OPERAND (arg1, 1);
12636 tree itype = TREE_TYPE (arg0);
12638 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12639 operand_equal_p guarantees no side-effects so we don't need
12640 to use omit_one_operand on Z. */
12641 if (operand_equal_p (arg01, arg11, 0))
12642 return fold_build2_loc (loc, code, type, arg00,
12643 fold_convert_loc (loc, TREE_TYPE (arg00),
12645 if (operand_equal_p (arg01, arg10, 0))
12646 return fold_build2_loc (loc, code, type, arg00,
12647 fold_convert_loc (loc, TREE_TYPE (arg00),
12649 if (operand_equal_p (arg00, arg11, 0))
12650 return fold_build2_loc (loc, code, type, arg01,
12651 fold_convert_loc (loc, TREE_TYPE (arg01),
12653 if (operand_equal_p (arg00, arg10, 0))
12654 return fold_build2_loc (loc, code, type, arg01,
12655 fold_convert_loc (loc, TREE_TYPE (arg01),
12658 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12659 if (TREE_CODE (arg01) == INTEGER_CST
12660 && TREE_CODE (arg11) == INTEGER_CST)
12662 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12663 fold_convert_loc (loc, itype, arg11));
12664 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12665 return fold_build2_loc (loc, code, type, tem,
12666 fold_convert_loc (loc, itype, arg10));
12670 /* Attempt to simplify equality/inequality comparisons of complex
12671 values. Only lower the comparison if the result is known or
12672 can be simplified to a single scalar comparison. */
12673 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12674 || TREE_CODE (arg0) == COMPLEX_CST)
12675 && (TREE_CODE (arg1) == COMPLEX_EXPR
12676 || TREE_CODE (arg1) == COMPLEX_CST))
12678 tree real0, imag0, real1, imag1;
12681 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12683 real0 = TREE_OPERAND (arg0, 0);
12684 imag0 = TREE_OPERAND (arg0, 1);
12688 real0 = TREE_REALPART (arg0);
12689 imag0 = TREE_IMAGPART (arg0);
12692 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12694 real1 = TREE_OPERAND (arg1, 0);
12695 imag1 = TREE_OPERAND (arg1, 1);
12699 real1 = TREE_REALPART (arg1);
12700 imag1 = TREE_IMAGPART (arg1);
12703 rcond = fold_binary_loc (loc, code, type, real0, real1);
12704 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12706 if (integer_zerop (rcond))
12708 if (code == EQ_EXPR)
12709 return omit_two_operands_loc (loc, type, boolean_false_node,
12711 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12715 if (code == NE_EXPR)
12716 return omit_two_operands_loc (loc, type, boolean_true_node,
12718 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12722 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12723 if (icond && TREE_CODE (icond) == INTEGER_CST)
12725 if (integer_zerop (icond))
12727 if (code == EQ_EXPR)
12728 return omit_two_operands_loc (loc, type, boolean_false_node,
12730 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12734 if (code == NE_EXPR)
12735 return omit_two_operands_loc (loc, type, boolean_true_node,
12737 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12748 tem = fold_comparison (loc, code, type, op0, op1);
12749 if (tem != NULL_TREE)
12752 /* Transform comparisons of the form X +- C CMP X. */
12753 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12754 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12755 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12756 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12757 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12758 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12760 tree arg01 = TREE_OPERAND (arg0, 1);
12761 enum tree_code code0 = TREE_CODE (arg0);
12764 if (TREE_CODE (arg01) == REAL_CST)
12765 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12767 is_positive = tree_int_cst_sgn (arg01);
12769 /* (X - c) > X becomes false. */
12770 if (code == GT_EXPR
12771 && ((code0 == MINUS_EXPR && is_positive >= 0)
12772 || (code0 == PLUS_EXPR && is_positive <= 0)))
12774 if (TREE_CODE (arg01) == INTEGER_CST
12775 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12776 fold_overflow_warning (("assuming signed overflow does not "
12777 "occur when assuming that (X - c) > X "
12778 "is always false"),
12779 WARN_STRICT_OVERFLOW_ALL);
12780 return constant_boolean_node (0, type);
12783 /* Likewise (X + c) < X becomes false. */
12784 if (code == LT_EXPR
12785 && ((code0 == PLUS_EXPR && is_positive >= 0)
12786 || (code0 == MINUS_EXPR && is_positive <= 0)))
12788 if (TREE_CODE (arg01) == INTEGER_CST
12789 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12790 fold_overflow_warning (("assuming signed overflow does not "
12791 "occur when assuming that "
12792 "(X + c) < X is always false"),
12793 WARN_STRICT_OVERFLOW_ALL);
12794 return constant_boolean_node (0, type);
12797 /* Convert (X - c) <= X to true. */
12798 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12800 && ((code0 == MINUS_EXPR && is_positive >= 0)
12801 || (code0 == PLUS_EXPR && is_positive <= 0)))
12803 if (TREE_CODE (arg01) == INTEGER_CST
12804 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12805 fold_overflow_warning (("assuming signed overflow does not "
12806 "occur when assuming that "
12807 "(X - c) <= X is always true"),
12808 WARN_STRICT_OVERFLOW_ALL);
12809 return constant_boolean_node (1, type);
12812 /* Convert (X + c) >= X to true. */
12813 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12815 && ((code0 == PLUS_EXPR && is_positive >= 0)
12816 || (code0 == MINUS_EXPR && is_positive <= 0)))
12818 if (TREE_CODE (arg01) == INTEGER_CST
12819 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12820 fold_overflow_warning (("assuming signed overflow does not "
12821 "occur when assuming that "
12822 "(X + c) >= X is always true"),
12823 WARN_STRICT_OVERFLOW_ALL);
12824 return constant_boolean_node (1, type);
12827 if (TREE_CODE (arg01) == INTEGER_CST)
12829 /* Convert X + c > X and X - c < X to true for integers. */
12830 if (code == GT_EXPR
12831 && ((code0 == PLUS_EXPR && is_positive > 0)
12832 || (code0 == MINUS_EXPR && is_positive < 0)))
12834 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12835 fold_overflow_warning (("assuming signed overflow does "
12836 "not occur when assuming that "
12837 "(X + c) > X is always true"),
12838 WARN_STRICT_OVERFLOW_ALL);
12839 return constant_boolean_node (1, type);
12842 if (code == LT_EXPR
12843 && ((code0 == MINUS_EXPR && is_positive > 0)
12844 || (code0 == PLUS_EXPR && is_positive < 0)))
12846 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12847 fold_overflow_warning (("assuming signed overflow does "
12848 "not occur when assuming that "
12849 "(X - c) < X is always true"),
12850 WARN_STRICT_OVERFLOW_ALL);
12851 return constant_boolean_node (1, type);
12854 /* Convert X + c <= X and X - c >= X to false for integers. */
12855 if (code == LE_EXPR
12856 && ((code0 == PLUS_EXPR && is_positive > 0)
12857 || (code0 == MINUS_EXPR && is_positive < 0)))
12859 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12860 fold_overflow_warning (("assuming signed overflow does "
12861 "not occur when assuming that "
12862 "(X + c) <= X is always false"),
12863 WARN_STRICT_OVERFLOW_ALL);
12864 return constant_boolean_node (0, type);
12867 if (code == GE_EXPR
12868 && ((code0 == MINUS_EXPR && is_positive > 0)
12869 || (code0 == PLUS_EXPR && is_positive < 0)))
12871 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12872 fold_overflow_warning (("assuming signed overflow does "
12873 "not occur when assuming that "
12874 "(X - c) >= X is always false"),
12875 WARN_STRICT_OVERFLOW_ALL);
12876 return constant_boolean_node (0, type);
12881 /* Comparisons with the highest or lowest possible integer of
12882 the specified precision will have known values. */
12884 tree arg1_type = TREE_TYPE (arg1);
12885 unsigned int width = TYPE_PRECISION (arg1_type);
12887 if (TREE_CODE (arg1) == INTEGER_CST
12888 && width <= 2 * HOST_BITS_PER_WIDE_INT
12889 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12891 HOST_WIDE_INT signed_max_hi;
12892 unsigned HOST_WIDE_INT signed_max_lo;
12893 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12895 if (width <= HOST_BITS_PER_WIDE_INT)
12897 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12902 if (TYPE_UNSIGNED (arg1_type))
12904 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12910 max_lo = signed_max_lo;
12911 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12917 width -= HOST_BITS_PER_WIDE_INT;
12918 signed_max_lo = -1;
12919 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12924 if (TYPE_UNSIGNED (arg1_type))
12926 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12931 max_hi = signed_max_hi;
12932 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12936 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12937 && TREE_INT_CST_LOW (arg1) == max_lo)
12941 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12944 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12947 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12950 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12952 /* The GE_EXPR and LT_EXPR cases above are not normally
12953 reached because of previous transformations. */
12958 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12960 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12964 arg1 = const_binop (PLUS_EXPR, arg1,
12965 build_int_cst (TREE_TYPE (arg1), 1));
12966 return fold_build2_loc (loc, EQ_EXPR, type,
12967 fold_convert_loc (loc,
12968 TREE_TYPE (arg1), arg0),
12971 arg1 = const_binop (PLUS_EXPR, arg1,
12972 build_int_cst (TREE_TYPE (arg1), 1));
12973 return fold_build2_loc (loc, NE_EXPR, type,
12974 fold_convert_loc (loc, TREE_TYPE (arg1),
12980 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12982 && TREE_INT_CST_LOW (arg1) == min_lo)
12986 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12989 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12992 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12995 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13000 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13002 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13006 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13007 return fold_build2_loc (loc, NE_EXPR, type,
13008 fold_convert_loc (loc,
13009 TREE_TYPE (arg1), arg0),
13012 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13013 return fold_build2_loc (loc, EQ_EXPR, type,
13014 fold_convert_loc (loc, TREE_TYPE (arg1),
13021 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13022 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13023 && TYPE_UNSIGNED (arg1_type)
13024 /* We will flip the signedness of the comparison operator
13025 associated with the mode of arg1, so the sign bit is
13026 specified by this mode. Check that arg1 is the signed
13027 max associated with this sign bit. */
13028 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13029 /* signed_type does not work on pointer types. */
13030 && INTEGRAL_TYPE_P (arg1_type))
13032 /* The following case also applies to X < signed_max+1
13033 and X >= signed_max+1 because of previous transformations.  */
13034 if (code == LE_EXPR || code == GT_EXPR)
13037 st = signed_type_for (TREE_TYPE (arg1));
13038 return fold_build2_loc (loc,
13039 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13040 type, fold_convert_loc (loc, st, arg0),
13041 build_int_cst (st, 0));
13047 /* If we are comparing an ABS_EXPR with a constant, we can
13048 convert all the cases into explicit comparisons, but they may
13049 well not be faster than doing the ABS and one comparison.
13050 But ABS (X) <= C is a range comparison, which becomes a subtraction
13051 and a comparison, and is probably faster. */
13052 if (code == LE_EXPR
13053 && TREE_CODE (arg1) == INTEGER_CST
13054 && TREE_CODE (arg0) == ABS_EXPR
13055 && ! TREE_SIDE_EFFECTS (arg0)
13056 && (0 != (tem = negate_expr (arg1)))
13057 && TREE_CODE (tem) == INTEGER_CST
13058 && !TREE_OVERFLOW (tem))
13059 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13060 build2 (GE_EXPR, type,
13061 TREE_OPERAND (arg0, 0), tem),
13062 build2 (LE_EXPR, type,
13063 TREE_OPERAND (arg0, 0), arg1));
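/* For example, abs (x) <= 7 folds to x >= -7 && x <= 7, built as a
TRUTH_ANDIF_EXPR of the two comparisons.  */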
13065 /* Convert ABS_EXPR<x> >= 0 to true. */
13066 strict_overflow_p = false;
13067 if (code == GE_EXPR
13068 && (integer_zerop (arg1)
13069 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13070 && real_zerop (arg1)))
13071 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13073 if (strict_overflow_p)
13074 fold_overflow_warning (("assuming signed overflow does not occur "
13075 "when simplifying comparison of "
13076 "absolute value and zero"),
13077 WARN_STRICT_OVERFLOW_CONDITIONAL);
13078 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13081 /* Convert ABS_EXPR<x> < 0 to false. */
13082 strict_overflow_p = false;
13083 if (code == LT_EXPR
13084 && (integer_zerop (arg1) || real_zerop (arg1))
13085 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13087 if (strict_overflow_p)
13088 fold_overflow_warning (("assuming signed overflow does not occur "
13089 "when simplifying comparison of "
13090 "absolute value and zero"),
13091 WARN_STRICT_OVERFLOW_CONDITIONAL);
13092 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13095 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13096 and similarly for >= into !=. */
13097 if ((code == LT_EXPR || code == GE_EXPR)
13098 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13099 && TREE_CODE (arg1) == LSHIFT_EXPR
13100 && integer_onep (TREE_OPERAND (arg1, 0)))
13101 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13102 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13103 TREE_OPERAND (arg1, 1)),
13104 build_int_cst (TREE_TYPE (arg0), 0));
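/* For example, for unsigned x, x < (1 << y) folds to (x >> y) == 0 and
x >= (1 << y) folds to (x >> y) != 0.  */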
13106 if ((code == LT_EXPR || code == GE_EXPR)
13107 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13108 && CONVERT_EXPR_P (arg1)
13109 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13110 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13112 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13113 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13114 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13115 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13116 build_int_cst (TREE_TYPE (arg0), 0));
13121 case UNORDERED_EXPR:
13129 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13131 t1 = fold_relational_const (code, type, arg0, arg1);
13132 if (t1 != NULL_TREE)
13136 /* If the first operand is NaN, the result is constant. */
13137 if (TREE_CODE (arg0) == REAL_CST
13138 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13139 && (code != LTGT_EXPR || ! flag_trapping_math))
13141 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13142 ? integer_zero_node
13143 : integer_one_node;
13144 return omit_one_operand_loc (loc, type, t1, arg1);
13147 /* If the second operand is NaN, the result is constant. */
13148 if (TREE_CODE (arg1) == REAL_CST
13149 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13150 && (code != LTGT_EXPR || ! flag_trapping_math))
13152 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13153 ? integer_zero_node
13154 : integer_one_node;
13155 return omit_one_operand_loc (loc, type, t1, arg0);
13158 /* Simplify unordered comparison of something with itself. */
13159 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13160 && operand_equal_p (arg0, arg1, 0))
13161 return constant_boolean_node (1, type);
13163 if (code == LTGT_EXPR
13164 && !flag_trapping_math
13165 && operand_equal_p (arg0, arg1, 0))
13166 return constant_boolean_node (0, type);
13168 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13170 tree targ0 = strip_float_extensions (arg0);
13171 tree targ1 = strip_float_extensions (arg1);
13172 tree newtype = TREE_TYPE (targ0);
13174 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13175 newtype = TREE_TYPE (targ1);
13177 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13178 return fold_build2_loc (loc, code, type,
13179 fold_convert_loc (loc, newtype, targ0),
13180 fold_convert_loc (loc, newtype, targ1));
13185 case COMPOUND_EXPR:
13186 /* When pedantic, a compound expression can be neither an lvalue
13187 nor an integer constant expression. */
13188 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13190 /* Don't let (0, 0) be a null pointer constant.  */
13191 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13192 : fold_convert_loc (loc, type, arg1);
13193 return pedantic_non_lvalue_loc (loc, tem);
13196 if ((TREE_CODE (arg0) == REAL_CST
13197 && TREE_CODE (arg1) == REAL_CST)
13198 || (TREE_CODE (arg0) == INTEGER_CST
13199 && TREE_CODE (arg1) == INTEGER_CST))
13200 return build_complex (type, arg0, arg1);
13201 if (TREE_CODE (arg0) == REALPART_EXPR
13202 && TREE_CODE (arg1) == IMAGPART_EXPR
13203 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 0)))
13204 == TYPE_MAIN_VARIANT (type))
13205 && operand_equal_p (TREE_OPERAND (arg0, 0),
13206 TREE_OPERAND (arg1, 0), 0))
13207 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13208 TREE_OPERAND (arg1, 0));
13212 /* An ASSERT_EXPR should never be passed to fold_binary. */
13213 gcc_unreachable ();
13217 } /* switch (code) */
13220 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13221 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees of GOTO_EXPR.  */
13225 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13227 switch (TREE_CODE (*tp))
13233 *walk_subtrees = 0;
13235 /* ... fall through ... */
13242 /* Return whether the sub-tree ST contains a label which is accessible from
13243 outside the sub-tree. */
13246 contains_label_p (tree st)
13249 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13252 /* Fold a ternary expression of code CODE and type TYPE with operands
13253 OP0, OP1, and OP2. Return the folded expression if folding is
13254 successful. Otherwise, return NULL_TREE. */
13257 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13258 tree op0, tree op1, tree op2)
13261 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13262 enum tree_code_class kind = TREE_CODE_CLASS (code);
13264 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13265 && TREE_CODE_LENGTH (code) == 3);
13267 /* Strip any conversions that don't change the mode. This is safe
13268 for every expression, except for a comparison expression because
13269 its signedness is derived from its operands. So, in the latter
13270 case, only strip conversions that don't change the signedness.
13272 Note that this is done as an internal manipulation within the
13273 constant folder, in order to find the simplest representation of
13274 the arguments so that their form can be studied. In any case,
13275 the appropriate type conversions should be put back in the tree
13276 that will get out of the constant folder. */
13297 case COMPONENT_REF:
13298 if (TREE_CODE (arg0) == CONSTRUCTOR
13299 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13301 unsigned HOST_WIDE_INT idx;
13303 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13310 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13311 so all simple results must be passed through pedantic_non_lvalue. */
13312 if (TREE_CODE (arg0) == INTEGER_CST)
13314 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13315 tem = integer_zerop (arg0) ? op2 : op1;
13316 /* Only optimize constant conditions when the selected branch
13317 has the same type as the COND_EXPR. This avoids optimizing
13318 away "c ? x : throw", where the throw has a void type.
13319 Avoid throwing away the operand that contains a label.  */
13320 if ((!TREE_SIDE_EFFECTS (unused_op)
13321 || !contains_label_p (unused_op))
13322 && (! VOID_TYPE_P (TREE_TYPE (tem))
13323 || VOID_TYPE_P (type)))
13324 return pedantic_non_lvalue_loc (loc, tem);
13327 if (operand_equal_p (arg1, op2, 0))
13328 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13330 /* If we have A op B ? A : C, we may be able to convert this to a
13331 simpler expression, depending on the operation and the values
13332 of B and C. Signed zeros prevent all of these transformations,
13333 for reasons given above each one.
13335 Also try swapping the arguments and inverting the conditional. */
13336 if (COMPARISON_CLASS_P (arg0)
13337 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13338 arg1, TREE_OPERAND (arg0, 1))
13339 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13341 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13346 if (COMPARISON_CLASS_P (arg0)
13347 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13349 TREE_OPERAND (arg0, 1))
13350 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13352 location_t loc0 = expr_location_or (arg0, loc);
13353 tem = fold_truth_not_expr (loc0, arg0);
13354 if (tem && COMPARISON_CLASS_P (tem))
13356 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13362 /* If the second operand is simpler than the third, swap them
13363 since that produces better jump optimization results. */
13364 if (truth_value_p (TREE_CODE (arg0))
13365 && tree_swap_operands_p (op1, op2, false))
13367 location_t loc0 = expr_location_or (arg0, loc);
13368 /* See if this can be inverted. If it can't, possibly because
13369 it was a floating-point inequality comparison, don't do
13371 tem = fold_truth_not_expr (loc0, arg0);
13373 return fold_build3_loc (loc, code, type, tem, op2, op1);
13376 /* Convert A ? 1 : 0 to simply A. */
13377 if (integer_onep (op1)
13378 && integer_zerop (op2)
13379 /* If we try to convert OP0 to our type, the
13380 call to fold will try to move the conversion inside
13381 a COND, which will recurse. In that case, the COND_EXPR
13382 is probably the best choice, so leave it alone. */
13383 && type == TREE_TYPE (arg0))
13384 return pedantic_non_lvalue_loc (loc, arg0);
13386 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13387 over COND_EXPR in cases such as floating point comparisons. */
13388 if (integer_zerop (op1)
13389 && integer_onep (op2)
13390 && truth_value_p (TREE_CODE (arg0)))
13391 return pedantic_non_lvalue_loc (loc,
13392 fold_convert_loc (loc, type,
13393 invert_truthvalue_loc (loc,
13396 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13397 if (TREE_CODE (arg0) == LT_EXPR
13398 && integer_zerop (TREE_OPERAND (arg0, 1))
13399 && integer_zerop (op2)
13400 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13402 /* sign_bit_p only checks ARG1 bits within A's precision.
13403 If <sign bit of A> has wider type than A, bits outside
13404 of A's precision in <sign bit of A> need to be checked.
13405 If they are all 0, this optimization needs to be done
13406 in unsigned A's type; if they are all 1, in signed A's type;
13407 otherwise this can't be done. */
13408 if (TYPE_PRECISION (TREE_TYPE (tem))
13409 < TYPE_PRECISION (TREE_TYPE (arg1))
13410 && TYPE_PRECISION (TREE_TYPE (tem))
13411 < TYPE_PRECISION (type))
13413 unsigned HOST_WIDE_INT mask_lo;
13414 HOST_WIDE_INT mask_hi;
13415 int inner_width, outer_width;
13418 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13419 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13420 if (outer_width > TYPE_PRECISION (type))
13421 outer_width = TYPE_PRECISION (type);
13423 if (outer_width > HOST_BITS_PER_WIDE_INT)
13425 mask_hi = ((unsigned HOST_WIDE_INT) -1
13426 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13432 mask_lo = ((unsigned HOST_WIDE_INT) -1
13433 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13435 if (inner_width > HOST_BITS_PER_WIDE_INT)
13437 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13438 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13442 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13443 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13445 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13446 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13448 tem_type = signed_type_for (TREE_TYPE (tem));
13449 tem = fold_convert_loc (loc, tem_type, tem);
13451 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13452 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13454 tem_type = unsigned_type_for (TREE_TYPE (tem));
13455 tem = fold_convert_loc (loc, tem_type, tem);
13463 fold_convert_loc (loc, type,
13464 fold_build2_loc (loc, BIT_AND_EXPR,
13465 TREE_TYPE (tem), tem,
13466 fold_convert_loc (loc,
13471 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13472 already handled above. */
13473 if (TREE_CODE (arg0) == BIT_AND_EXPR
13474 && integer_onep (TREE_OPERAND (arg0, 1))
13475 && integer_zerop (op2)
13476 && integer_pow2p (arg1))
13478 tree tem = TREE_OPERAND (arg0, 0);
13480 if (TREE_CODE (tem) == RSHIFT_EXPR
13481 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13482 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13483 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13484 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13485 TREE_OPERAND (tem, 0), arg1);
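/* Illustrative example (not part of the folder):

     ((a >> 3) & 1) ? 8 : 0

   matches this pattern (N is 3 and 1 << N is 8), so it is rewritten as
   the single expression a & 8.  */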
13488 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13489 is probably obsolete because the first operand should be a
13490 truth value (that's why we have the two cases above), but let's
13491 leave it in until we can confirm this for all front-ends. */
13492 if (integer_zerop (op2)
13493 && TREE_CODE (arg0) == NE_EXPR
13494 && integer_zerop (TREE_OPERAND (arg0, 1))
13495 && integer_pow2p (arg1)
13496 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13497 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13498 arg1, OEP_ONLY_CONST))
13499 return pedantic_non_lvalue_loc (loc,
13500 fold_convert_loc (loc, type,
13501 TREE_OPERAND (arg0, 0)));
13503 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13504 if (integer_zerop (op2)
13505 && truth_value_p (TREE_CODE (arg0))
13506 && truth_value_p (TREE_CODE (arg1)))
13507 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13508 fold_convert_loc (loc, type, arg0),
13511 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13512 if (integer_onep (op2)
13513 && truth_value_p (TREE_CODE (arg0))
13514 && truth_value_p (TREE_CODE (arg1)))
13516 location_t loc0 = expr_location_or (arg0, loc);
13517 /* Only perform transformation if ARG0 is easily inverted. */
13518 tem = fold_truth_not_expr (loc0, arg0);
13520 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13521 fold_convert_loc (loc, type, tem),
13525 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13526 if (integer_zerop (arg1)
13527 && truth_value_p (TREE_CODE (arg0))
13528 && truth_value_p (TREE_CODE (op2)))
13530 location_t loc0 = expr_location_or (arg0, loc);
13531 /* Only perform transformation if ARG0 is easily inverted. */
13532 tem = fold_truth_not_expr (loc0, arg0);
13534 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13535 fold_convert_loc (loc, type, tem),
13539 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13540 if (integer_onep (arg1)
13541 && truth_value_p (TREE_CODE (arg0))
13542 && truth_value_p (TREE_CODE (op2)))
13543 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13544 fold_convert_loc (loc, type, arg0),
13550 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13551 of fold_ternary on them. */
13552 gcc_unreachable ();
13554 case BIT_FIELD_REF:
13555 if ((TREE_CODE (arg0) == VECTOR_CST
13556 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13557 && type == TREE_TYPE (TREE_TYPE (arg0)))
13559 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13560 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13563 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13564 && (idx % width) == 0
13565 && (idx = idx / width)
13566 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13568 tree elements = NULL_TREE;
13570 if (TREE_CODE (arg0) == VECTOR_CST)
13571 elements = TREE_VECTOR_CST_ELTS (arg0);
13574 unsigned HOST_WIDE_INT idx;
13577 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13578 elements = tree_cons (NULL_TREE, value, elements);
13580 while (idx-- > 0 && elements)
13581 elements = TREE_CHAIN (elements);
13583 return TREE_VALUE (elements);
13585 return build_zero_cst (type);
13589 /* A bit-field-ref that references the full argument can be stripped. */
13590 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13591 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13592 && integer_zerop (op2))
13593 return fold_convert_loc (loc, type, arg0);
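/* Illustrative example (not part of the folder): for a 32-bit int A,

     BIT_FIELD_REF <a, 32, 0>

   covers the whole operand, so it folds to a plain conversion of A to
   the result type.  */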
13598 /* For integers we can decompose the FMA if possible. */
13599 if (TREE_CODE (arg0) == INTEGER_CST
13600 && TREE_CODE (arg1) == INTEGER_CST)
13601 return fold_build2_loc (loc, PLUS_EXPR, type,
13602 const_binop (MULT_EXPR, arg0, arg1), arg2);
13603 if (integer_zerop (arg2))
13604 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13606 return fold_fma (loc, type, arg0, arg1, arg2);
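/* Illustrative examples (not part of the folder): with constant operands,
   FMA_EXPR <3, 4, c> folds to the sum 12 + c via const_binop, and
   FMA_EXPR <a, b, 0> degenerates to the plain product a * b.  */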
13610 } /* switch (code) */
13613 /* Perform constant folding and related simplification of EXPR.
13614 The related simplifications include x*1 => x, x*0 => 0, etc.,
13615 and application of the associative law.
13616 NOP_EXPR conversions may be removed freely (as long as we
13617 are careful not to change the type of the overall expression).
13618 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13619 but we can constant-fold them if they have constant operands. */
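/* Illustrative sketch (not part of the folder): a typical caller builds a
   tree and hands it to fold, e.g.

     tree sum = fold (build2 (PLUS_EXPR, integer_type_node,
                              x, integer_zero_node));

   where X is assumed to be an existing tree of type int; the addition of
   zero is folded away and X itself comes back.  */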
13621 #ifdef ENABLE_FOLD_CHECKING
13622 # define fold(x) fold_1 (x)
13623 static tree fold_1 (tree);
13629 const tree t = expr;
13630 enum tree_code code = TREE_CODE (t);
13631 enum tree_code_class kind = TREE_CODE_CLASS (code);
13633 location_t loc = EXPR_LOCATION (expr);
13635 /* Return right away if a constant. */
13636 if (kind == tcc_constant)
13639 /* CALL_EXPR-like objects with variable numbers of operands are
13640 treated specially. */
13641 if (kind == tcc_vl_exp)
13643 if (code == CALL_EXPR)
13645 tem = fold_call_expr (loc, expr, false);
13646 return tem ? tem : expr;
13651 if (IS_EXPR_CODE_CLASS (kind))
13653 tree type = TREE_TYPE (t);
13654 tree op0, op1, op2;
13656 switch (TREE_CODE_LENGTH (code))
13659 op0 = TREE_OPERAND (t, 0);
13660 tem = fold_unary_loc (loc, code, type, op0);
13661 return tem ? tem : expr;
13663 op0 = TREE_OPERAND (t, 0);
13664 op1 = TREE_OPERAND (t, 1);
13665 tem = fold_binary_loc (loc, code, type, op0, op1);
13666 return tem ? tem : expr;
13668 op0 = TREE_OPERAND (t, 0);
13669 op1 = TREE_OPERAND (t, 1);
13670 op2 = TREE_OPERAND (t, 2);
13671 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13672 return tem ? tem : expr;
13682 tree op0 = TREE_OPERAND (t, 0);
13683 tree op1 = TREE_OPERAND (t, 1);
13685 if (TREE_CODE (op1) == INTEGER_CST
13686 && TREE_CODE (op0) == CONSTRUCTOR
13687 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13689 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13690 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13691 unsigned HOST_WIDE_INT begin = 0;
13693 /* Find a matching index by means of a binary search. */
13694 while (begin != end)
13696 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13697 tree index = VEC_index (constructor_elt, elts, middle)->index;
13699 if (TREE_CODE (index) == INTEGER_CST
13700 && tree_int_cst_lt (index, op1))
13701 begin = middle + 1;
13702 else if (TREE_CODE (index) == INTEGER_CST
13703 && tree_int_cst_lt (op1, index))
13705 else if (TREE_CODE (index) == RANGE_EXPR
13706 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13707 begin = middle + 1;
13708 else if (TREE_CODE (index) == RANGE_EXPR
13709 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13712 return VEC_index (constructor_elt, elts, middle)->value;
13720 return fold (DECL_INITIAL (t));
13724 } /* switch (code) */
13727 #ifdef ENABLE_FOLD_CHECKING
13730 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13731 static void fold_check_failed (const_tree, const_tree);
13732 void print_fold_checksum (const_tree);
13734 /* When --enable-checking=fold, compute a digest of expr before
13735 and after the actual fold call to verify that fold did not
13736 accidentally change the original expr. */
13742 struct md5_ctx ctx;
13743 unsigned char checksum_before[16], checksum_after[16];
13746 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13747 md5_init_ctx (&ctx);
13748 fold_checksum_tree (expr, &ctx, ht);
13749 md5_finish_ctx (&ctx, checksum_before);
13752 ret = fold_1 (expr);
13754 md5_init_ctx (&ctx);
13755 fold_checksum_tree (expr, &ctx, ht);
13756 md5_finish_ctx (&ctx, checksum_after);
13759 if (memcmp (checksum_before, checksum_after, 16))
13760 fold_check_failed (expr, ret);
13766 print_fold_checksum (const_tree expr)
13768 struct md5_ctx ctx;
13769 unsigned char checksum[16], cnt;
13772 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13773 md5_init_ctx (&ctx);
13774 fold_checksum_tree (expr, &ctx, ht);
13775 md5_finish_ctx (&ctx, checksum);
13777 for (cnt = 0; cnt < 16; ++cnt)
13778 fprintf (stderr, "%02x", checksum[cnt]);
13779 putc ('\n', stderr);
13783 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13785 internal_error ("fold check: original tree changed by fold");
13789 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13792 enum tree_code code;
13793 union tree_node buf;
13798 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13799 <= sizeof (struct tree_function_decl))
13800 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13803 slot = (void **) htab_find_slot (ht, expr, INSERT);
13806 *slot = CONST_CAST_TREE (expr);
13807 code = TREE_CODE (expr);
13808 if (TREE_CODE_CLASS (code) == tcc_declaration
13809 && DECL_ASSEMBLER_NAME_SET_P (expr))
13811 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13812 memcpy ((char *) &buf, expr, tree_size (expr));
13813 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13814 expr = (tree) &buf;
13816 else if (TREE_CODE_CLASS (code) == tcc_type
13817 && (TYPE_POINTER_TO (expr)
13818 || TYPE_REFERENCE_TO (expr)
13819 || TYPE_CACHED_VALUES_P (expr)
13820 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13821 || TYPE_NEXT_VARIANT (expr)))
13823 /* Allow these fields to be modified. */
13825 memcpy ((char *) &buf, expr, tree_size (expr));
13826 expr = tmp = (tree) &buf;
13827 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13828 TYPE_POINTER_TO (tmp) = NULL;
13829 TYPE_REFERENCE_TO (tmp) = NULL;
13830 TYPE_NEXT_VARIANT (tmp) = NULL;
13831 if (TYPE_CACHED_VALUES_P (tmp))
13833 TYPE_CACHED_VALUES_P (tmp) = 0;
13834 TYPE_CACHED_VALUES (tmp) = NULL;
13837 md5_process_bytes (expr, tree_size (expr), ctx);
13838 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13839 if (TREE_CODE_CLASS (code) != tcc_type
13840 && TREE_CODE_CLASS (code) != tcc_declaration
13841 && code != TREE_LIST
13842 && code != SSA_NAME
13843 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13844 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13845 switch (TREE_CODE_CLASS (code))
13851 md5_process_bytes (TREE_STRING_POINTER (expr),
13852 TREE_STRING_LENGTH (expr), ctx);
13855 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13856 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13859 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13865 case tcc_exceptional:
13869 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13870 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13871 expr = TREE_CHAIN (expr);
13872 goto recursive_label;
13875 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13876 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13882 case tcc_expression:
13883 case tcc_reference:
13884 case tcc_comparison:
13887 case tcc_statement:
13889 len = TREE_OPERAND_LENGTH (expr);
13890 for (i = 0; i < len; ++i)
13891 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13893 case tcc_declaration:
13894 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13895 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13896 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13898 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13899 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13900 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13901 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13902 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13904 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13905 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13907 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13909 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13910 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13911 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13915 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13916 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13917 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13918 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13919 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13920 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13921 if (INTEGRAL_TYPE_P (expr)
13922 || SCALAR_FLOAT_TYPE_P (expr))
13924 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13925 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13927 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13928 if (TREE_CODE (expr) == RECORD_TYPE
13929 || TREE_CODE (expr) == UNION_TYPE
13930 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13931 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13932 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13939 /* Helper function for outputting the checksum of a tree T. When
13940 debugging with gdb, you can "define mynext" to be "next" followed
13941 by "call debug_fold_checksum (op0)", then just trace down till the
13944 DEBUG_FUNCTION void
13945 debug_fold_checksum (const_tree t)
13948 unsigned char checksum[16];
13949 struct md5_ctx ctx;
13950 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13952 md5_init_ctx (&ctx);
13953 fold_checksum_tree (t, &ctx, ht);
13954 md5_finish_ctx (&ctx, checksum);
13957 for (i = 0; i < 16; i++)
13958 fprintf (stderr, "%d ", checksum[i]);
13960 fprintf (stderr, "\n");
13965 /* Fold a unary tree expression with code CODE of type TYPE with an
13966 operand OP0. LOC is the location of the resulting expression.
13967 Return a folded expression if successful. Otherwise, return a tree
13968 expression with code CODE of type TYPE with an operand OP0. */
13971 fold_build1_stat_loc (location_t loc,
13972 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13975 #ifdef ENABLE_FOLD_CHECKING
13976 unsigned char checksum_before[16], checksum_after[16];
13977 struct md5_ctx ctx;
13980 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13981 md5_init_ctx (&ctx);
13982 fold_checksum_tree (op0, &ctx, ht);
13983 md5_finish_ctx (&ctx, checksum_before);
13987 tem = fold_unary_loc (loc, code, type, op0);
13989 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
13991 #ifdef ENABLE_FOLD_CHECKING
13992 md5_init_ctx (&ctx);
13993 fold_checksum_tree (op0, &ctx, ht);
13994 md5_finish_ctx (&ctx, checksum_after);
13997 if (memcmp (checksum_before, checksum_after, 16))
13998 fold_check_failed (op0, tem);
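/* Illustrative sketch (not part of GCC itself): callers normally reach the
   function above through the fold_build1 macro, e.g.

     tree neg = fold_build1 (NEGATE_EXPR, integer_type_node, cst);

   where CST is assumed to be an existing INTEGER_CST; the negation is
   folded to a new constant instead of building a NEGATE_EXPR node.  */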
14003 /* Fold a binary tree expression with code CODE of type TYPE with
14004 operands OP0 and OP1. LOC is the location of the resulting
14005 expression. Return a folded expression if successful. Otherwise,
14006 return a tree expression with code CODE of type TYPE with operands
14010 fold_build2_stat_loc (location_t loc,
14011 enum tree_code code, tree type, tree op0, tree op1
14015 #ifdef ENABLE_FOLD_CHECKING
14016 unsigned char checksum_before_op0[16],
14017 checksum_before_op1[16],
14018 checksum_after_op0[16],
14019 checksum_after_op1[16];
14020 struct md5_ctx ctx;
14023 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14024 md5_init_ctx (&ctx);
14025 fold_checksum_tree (op0, &ctx, ht);
14026 md5_finish_ctx (&ctx, checksum_before_op0);
14029 md5_init_ctx (&ctx);
14030 fold_checksum_tree (op1, &ctx, ht);
14031 md5_finish_ctx (&ctx, checksum_before_op1);
14035 tem = fold_binary_loc (loc, code, type, op0, op1);
14037 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14039 #ifdef ENABLE_FOLD_CHECKING
14040 md5_init_ctx (&ctx);
14041 fold_checksum_tree (op0, &ctx, ht);
14042 md5_finish_ctx (&ctx, checksum_after_op0);
14045 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14046 fold_check_failed (op0, tem);
14048 md5_init_ctx (&ctx);
14049 fold_checksum_tree (op1, &ctx, ht);
14050 md5_finish_ctx (&ctx, checksum_after_op1);
14053 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14054 fold_check_failed (op1, tem);
14059 /* Fold a ternary tree expression with code CODE of type TYPE with
14060 operands OP0, OP1, and OP2. Return a folded expression if
14061 successful. Otherwise, return a tree expression with code CODE of
14062 type TYPE with operands OP0, OP1, and OP2. */
14065 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14066 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14069 #ifdef ENABLE_FOLD_CHECKING
14070 unsigned char checksum_before_op0[16],
14071 checksum_before_op1[16],
14072 checksum_before_op2[16],
14073 checksum_after_op0[16],
14074 checksum_after_op1[16],
14075 checksum_after_op2[16];
14076 struct md5_ctx ctx;
14079 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14080 md5_init_ctx (&ctx);
14081 fold_checksum_tree (op0, &ctx, ht);
14082 md5_finish_ctx (&ctx, checksum_before_op0);
14085 md5_init_ctx (&ctx);
14086 fold_checksum_tree (op1, &ctx, ht);
14087 md5_finish_ctx (&ctx, checksum_before_op1);
14090 md5_init_ctx (&ctx);
14091 fold_checksum_tree (op2, &ctx, ht);
14092 md5_finish_ctx (&ctx, checksum_before_op2);
14096 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14097 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14099 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14101 #ifdef ENABLE_FOLD_CHECKING
14102 md5_init_ctx (&ctx);
14103 fold_checksum_tree (op0, &ctx, ht);
14104 md5_finish_ctx (&ctx, checksum_after_op0);
14107 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14108 fold_check_failed (op0, tem);
14110 md5_init_ctx (&ctx);
14111 fold_checksum_tree (op1, &ctx, ht);
14112 md5_finish_ctx (&ctx, checksum_after_op1);
14115 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14116 fold_check_failed (op1, tem);
14118 md5_init_ctx (&ctx);
14119 fold_checksum_tree (op2, &ctx, ht);
14120 md5_finish_ctx (&ctx, checksum_after_op2);
14123 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14124 fold_check_failed (op2, tem);
14129 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14130 arguments in ARGARRAY, and a null static chain.
14131 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14132 of type TYPE from the given operands as constructed by build_call_array. */
14135 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14136 int nargs, tree *argarray)
14139 #ifdef ENABLE_FOLD_CHECKING
14140 unsigned char checksum_before_fn[16],
14141 checksum_before_arglist[16],
14142 checksum_after_fn[16],
14143 checksum_after_arglist[16];
14144 struct md5_ctx ctx;
14148 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14149 md5_init_ctx (&ctx);
14150 fold_checksum_tree (fn, &ctx, ht);
14151 md5_finish_ctx (&ctx, checksum_before_fn);
14154 md5_init_ctx (&ctx);
14155 for (i = 0; i < nargs; i++)
14156 fold_checksum_tree (argarray[i], &ctx, ht);
14157 md5_finish_ctx (&ctx, checksum_before_arglist);
14161 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14163 #ifdef ENABLE_FOLD_CHECKING
14164 md5_init_ctx (&ctx);
14165 fold_checksum_tree (fn, &ctx, ht);
14166 md5_finish_ctx (&ctx, checksum_after_fn);
14169 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14170 fold_check_failed (fn, tem);
14172 md5_init_ctx (&ctx);
14173 for (i = 0; i < nargs; i++)
14174 fold_checksum_tree (argarray[i], &ctx, ht);
14175 md5_finish_ctx (&ctx, checksum_after_arglist);
14178 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14179 fold_check_failed (NULL_TREE, tem);
14184 /* Perform constant folding and related simplification of initializer
14185 expression EXPR. These behave identically to "fold_buildN" but ignore
14186 potential run-time traps and exceptions that fold must preserve. */
14188 #define START_FOLD_INIT \
14189 int saved_signaling_nans = flag_signaling_nans;\
14190 int saved_trapping_math = flag_trapping_math;\
14191 int saved_rounding_math = flag_rounding_math;\
14192 int saved_trapv = flag_trapv;\
14193 int saved_folding_initializer = folding_initializer;\
14194 flag_signaling_nans = 0;\
14195 flag_trapping_math = 0;\
14196 flag_rounding_math = 0;\
14198 folding_initializer = 1;
14200 #define END_FOLD_INIT \
14201 flag_signaling_nans = saved_signaling_nans;\
14202 flag_trapping_math = saved_trapping_math;\
14203 flag_rounding_math = saved_rounding_math;\
14204 flag_trapv = saved_trapv;\
14205 folding_initializer = saved_folding_initializer;
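/* Illustrative sketch (not part of GCC itself): each *_initializer_loc
   wrapper below simply brackets the ordinary fold call with the two
   macros, e.g.

     START_FOLD_INIT;
     result = fold_build2_loc (loc, code, type, op0, op1);
     END_FOLD_INIT;

   so that trapping math, -ftrapv, rounding math and signaling NaNs are
   ignored while a static initializer is folded.  */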
14208 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14209 tree type, tree op)
14214 result = fold_build1_loc (loc, code, type, op);
14221 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14222 tree type, tree op0, tree op1)
14227 result = fold_build2_loc (loc, code, type, op0, op1);
14234 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14235 tree type, tree op0, tree op1, tree op2)
14240 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14247 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14248 int nargs, tree *argarray)
14253 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14259 #undef START_FOLD_INIT
14260 #undef END_FOLD_INIT
14262 /* Determine whether the first argument is a multiple of the second argument.
14263 Return 0 if it is not, or if we cannot easily determine that it is.
14265 An example of the sort of thing we care about (at this point; this routine
14266 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14267 fold cases do now) is discovering that
14269 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14275 is a multiple of SAVE_EXPR (J * 8), when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14277 This code also handles discovering that
14279 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14281 is a multiple of 8 so we don't have to worry about dealing with a
14282 possible remainder.
14284 Note that we *look* inside a SAVE_EXPR only to determine how it was
14285 calculated; it is not safe for fold to do much of anything else with the
14286 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14287 at run time. For example, the latter example above *cannot* be implemented
14288 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14289 evaluation time of the original SAVE_EXPR is not necessarily the same at
14290 the time the new expression is evaluated. The only optimization of this
14291 sort that would be valid is changing
14293 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14297 SAVE_EXPR (I) * SAVE_EXPR (J)
14299 (where the same SAVE_EXPR (J) is used in the original and the
14300 transformed version). */
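/* For example (illustrative only): with TYPE equal to sizetype,

     multiple_of_p (sizetype, <tree for SAVE_EXPR (J * 8)>, size_int (8))

   returns 1, because the SAVE_EXPR is looked through and the constant
   factor 8 of the product is an exact multiple of BOTTOM.  */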
14303 multiple_of_p (tree type, const_tree top, const_tree bottom)
14305 if (operand_equal_p (top, bottom, 0))
14308 if (TREE_CODE (type) != INTEGER_TYPE)
14311 switch (TREE_CODE (top))
14314 /* Bitwise and provides a power of two multiple. If the mask is
14315 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14316 if (!integer_pow2p (bottom))
14321 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14322 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14326 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14327 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14330 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14334 op1 = TREE_OPERAND (top, 1);
14335 /* const_binop may not detect overflow correctly,
14336 so check for it explicitly here. */
14337 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14338 > TREE_INT_CST_LOW (op1)
14339 && TREE_INT_CST_HIGH (op1) == 0
14340 && 0 != (t1 = fold_convert (type,
14341 const_binop (LSHIFT_EXPR,
14344 && !TREE_OVERFLOW (t1))
14345 return multiple_of_p (type, t1, bottom);
14350 /* Can't handle conversions from non-integral or wider integral type. */
14351 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14352 || (TYPE_PRECISION (type)
14353 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14356 /* .. fall through ... */
14359 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14362 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14363 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14366 if (TREE_CODE (bottom) != INTEGER_CST
14367 || integer_zerop (bottom)
14368 || (TYPE_UNSIGNED (type)
14369 && (tree_int_cst_sgn (top) < 0
14370 || tree_int_cst_sgn (bottom) < 0)))
14372 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14380 /* Return true if CODE or TYPE is known to be non-negative. */
14383 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14385 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14386 && truth_value_p (code))
14387 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14388 have a signed:1 type (where the values are -1 and 0). */
14393 /* Return true if (CODE OP0) is known to be non-negative. If the return
14394 value is based on the assumption that signed overflow is undefined,
14395 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14396 *STRICT_OVERFLOW_P. */
14399 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14400 bool *strict_overflow_p)
14402 if (TYPE_UNSIGNED (type))
14408 /* We can't return 1 if flag_wrapv is set because
14409 ABS_EXPR<INT_MIN> = INT_MIN. */
14410 if (!INTEGRAL_TYPE_P (type))
14412 if (TYPE_OVERFLOW_UNDEFINED (type))
14414 *strict_overflow_p = true;
14419 case NON_LVALUE_EXPR:
14421 case FIX_TRUNC_EXPR:
14422 return tree_expr_nonnegative_warnv_p (op0,
14423 strict_overflow_p);
14427 tree inner_type = TREE_TYPE (op0);
14428 tree outer_type = type;
14430 if (TREE_CODE (outer_type) == REAL_TYPE)
14432 if (TREE_CODE (inner_type) == REAL_TYPE)
14433 return tree_expr_nonnegative_warnv_p (op0,
14434 strict_overflow_p);
14435 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14437 if (TYPE_UNSIGNED (inner_type))
14439 return tree_expr_nonnegative_warnv_p (op0,
14440 strict_overflow_p);
14443 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14445 if (TREE_CODE (inner_type) == REAL_TYPE)
14446 return tree_expr_nonnegative_warnv_p (op0,
14447 strict_overflow_p);
14448 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14449 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14450 && TYPE_UNSIGNED (inner_type);
14456 return tree_simple_nonnegative_warnv_p (code, type);
14459 /* We don't know sign of `t', so be conservative and return false. */
14463 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14464 value is based on the assumption that signed overflow is undefined,
14465 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14466 *STRICT_OVERFLOW_P. */
14469 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14470 tree op1, bool *strict_overflow_p)
14472 if (TYPE_UNSIGNED (type))
14477 case POINTER_PLUS_EXPR:
14479 if (FLOAT_TYPE_P (type))
14480 return (tree_expr_nonnegative_warnv_p (op0,
14482 && tree_expr_nonnegative_warnv_p (op1,
14483 strict_overflow_p));
14485 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14486 both unsigned and at least 2 bits shorter than the result. */
14487 if (TREE_CODE (type) == INTEGER_TYPE
14488 && TREE_CODE (op0) == NOP_EXPR
14489 && TREE_CODE (op1) == NOP_EXPR)
14491 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14492 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14493 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14494 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14496 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14497 TYPE_PRECISION (inner2)) + 1;
14498 return prec < TYPE_PRECISION (type);
14504 if (FLOAT_TYPE_P (type))
14506 /* x * x for floating point x is always non-negative. */
14507 if (operand_equal_p (op0, op1, 0))
14509 return (tree_expr_nonnegative_warnv_p (op0,
14511 && tree_expr_nonnegative_warnv_p (op1,
14512 strict_overflow_p));
14515 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14516 both unsigned and their total number of bits is less than that of the result. */
14517 if (TREE_CODE (type) == INTEGER_TYPE
14518 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14519 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14521 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14522 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14524 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14525 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14528 bool unsigned0 = TYPE_UNSIGNED (inner0);
14529 bool unsigned1 = TYPE_UNSIGNED (inner1);
14531 if (TREE_CODE (op0) == INTEGER_CST)
14532 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14534 if (TREE_CODE (op1) == INTEGER_CST)
14535 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14537 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14538 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14540 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14541 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14542 : TYPE_PRECISION (inner0);
14544 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14545 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14546 : TYPE_PRECISION (inner1);
14548 return precision0 + precision1 < TYPE_PRECISION (type);
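/* Illustrative example (not part of the checker): assuming a 32-bit int,
   multiplying two values zero-extended from unsigned char,

     (int) uc1 * (int) uc2

   needs at most 8 + 8 = 16 bits, which is less than the 32-bit result
   type, so the product is known to be non-negative.  */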
14555 return (tree_expr_nonnegative_warnv_p (op0,
14557 || tree_expr_nonnegative_warnv_p (op1,
14558 strict_overflow_p));
14564 case TRUNC_DIV_EXPR:
14565 case CEIL_DIV_EXPR:
14566 case FLOOR_DIV_EXPR:
14567 case ROUND_DIV_EXPR:
14568 return (tree_expr_nonnegative_warnv_p (op0,
14570 && tree_expr_nonnegative_warnv_p (op1,
14571 strict_overflow_p));
14573 case TRUNC_MOD_EXPR:
14574 case CEIL_MOD_EXPR:
14575 case FLOOR_MOD_EXPR:
14576 case ROUND_MOD_EXPR:
14577 return tree_expr_nonnegative_warnv_p (op0,
14578 strict_overflow_p);
14580 return tree_simple_nonnegative_warnv_p (code, type);
14583 /* We don't know sign of `t', so be conservative and return false. */
14587 /* Return true if T is known to be non-negative. If the return
14588 value is based on the assumption that signed overflow is undefined,
14589 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14590 *STRICT_OVERFLOW_P. */
14593 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14595 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14598 switch (TREE_CODE (t))
14601 return tree_int_cst_sgn (t) >= 0;
14604 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14607 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14610 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14612 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14613 strict_overflow_p));
14615 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14618 /* We don't know sign of `t', so be conservative and return false. */
14622 /* Return true if T is known to be non-negative. If the return
14623 value is based on the assumption that signed overflow is undefined,
14624 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14625 *STRICT_OVERFLOW_P. */
14628 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14629 tree arg0, tree arg1, bool *strict_overflow_p)
14631 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14632 switch (DECL_FUNCTION_CODE (fndecl))
14634 CASE_FLT_FN (BUILT_IN_ACOS):
14635 CASE_FLT_FN (BUILT_IN_ACOSH):
14636 CASE_FLT_FN (BUILT_IN_CABS):
14637 CASE_FLT_FN (BUILT_IN_COSH):
14638 CASE_FLT_FN (BUILT_IN_ERFC):
14639 CASE_FLT_FN (BUILT_IN_EXP):
14640 CASE_FLT_FN (BUILT_IN_EXP10):
14641 CASE_FLT_FN (BUILT_IN_EXP2):
14642 CASE_FLT_FN (BUILT_IN_FABS):
14643 CASE_FLT_FN (BUILT_IN_FDIM):
14644 CASE_FLT_FN (BUILT_IN_HYPOT):
14645 CASE_FLT_FN (BUILT_IN_POW10):
14646 CASE_INT_FN (BUILT_IN_FFS):
14647 CASE_INT_FN (BUILT_IN_PARITY):
14648 CASE_INT_FN (BUILT_IN_POPCOUNT):
14649 case BUILT_IN_BSWAP32:
14650 case BUILT_IN_BSWAP64:
14654 CASE_FLT_FN (BUILT_IN_SQRT):
14655 /* sqrt(-0.0) is -0.0. */
14656 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14658 return tree_expr_nonnegative_warnv_p (arg0,
14659 strict_overflow_p);
14661 CASE_FLT_FN (BUILT_IN_ASINH):
14662 CASE_FLT_FN (BUILT_IN_ATAN):
14663 CASE_FLT_FN (BUILT_IN_ATANH):
14664 CASE_FLT_FN (BUILT_IN_CBRT):
14665 CASE_FLT_FN (BUILT_IN_CEIL):
14666 CASE_FLT_FN (BUILT_IN_ERF):
14667 CASE_FLT_FN (BUILT_IN_EXPM1):
14668 CASE_FLT_FN (BUILT_IN_FLOOR):
14669 CASE_FLT_FN (BUILT_IN_FMOD):
14670 CASE_FLT_FN (BUILT_IN_FREXP):
14671 CASE_FLT_FN (BUILT_IN_ICEIL):
14672 CASE_FLT_FN (BUILT_IN_IFLOOR):
14673 CASE_FLT_FN (BUILT_IN_IRINT):
14674 CASE_FLT_FN (BUILT_IN_IROUND):
14675 CASE_FLT_FN (BUILT_IN_LCEIL):
14676 CASE_FLT_FN (BUILT_IN_LDEXP):
14677 CASE_FLT_FN (BUILT_IN_LFLOOR):
14678 CASE_FLT_FN (BUILT_IN_LLCEIL):
14679 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14680 CASE_FLT_FN (BUILT_IN_LLRINT):
14681 CASE_FLT_FN (BUILT_IN_LLROUND):
14682 CASE_FLT_FN (BUILT_IN_LRINT):
14683 CASE_FLT_FN (BUILT_IN_LROUND):
14684 CASE_FLT_FN (BUILT_IN_MODF):
14685 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14686 CASE_FLT_FN (BUILT_IN_RINT):
14687 CASE_FLT_FN (BUILT_IN_ROUND):
14688 CASE_FLT_FN (BUILT_IN_SCALB):
14689 CASE_FLT_FN (BUILT_IN_SCALBLN):
14690 CASE_FLT_FN (BUILT_IN_SCALBN):
14691 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14692 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14693 CASE_FLT_FN (BUILT_IN_SINH):
14694 CASE_FLT_FN (BUILT_IN_TANH):
14695 CASE_FLT_FN (BUILT_IN_TRUNC):
14696 /* True if the 1st argument is nonnegative. */
14697 return tree_expr_nonnegative_warnv_p (arg0,
14698 strict_overflow_p);
14700 CASE_FLT_FN (BUILT_IN_FMAX):
14701 /* True if the 1st OR 2nd arguments are nonnegative. */
14702 return (tree_expr_nonnegative_warnv_p (arg0,
14704 || (tree_expr_nonnegative_warnv_p (arg1,
14705 strict_overflow_p)));
14707 CASE_FLT_FN (BUILT_IN_FMIN):
14708 /* True if the 1st AND 2nd arguments are nonnegative. */
14709 return (tree_expr_nonnegative_warnv_p (arg0,
14711 && (tree_expr_nonnegative_warnv_p (arg1,
14712 strict_overflow_p)));
14714 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14715 /* True if the 2nd argument is nonnegative. */
14716 return tree_expr_nonnegative_warnv_p (arg1,
14717 strict_overflow_p);
14719 CASE_FLT_FN (BUILT_IN_POWI):
14720 /* True if the 1st argument is nonnegative or the second
14721 argument is an even integer. */
14722 if (TREE_CODE (arg1) == INTEGER_CST
14723 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14725 return tree_expr_nonnegative_warnv_p (arg0,
14726 strict_overflow_p);
14728 CASE_FLT_FN (BUILT_IN_POW):
14729 /* True if the 1st argument is nonnegative or the second
14730 argument is an even integer valued real. */
14731 if (TREE_CODE (arg1) == REAL_CST)
14736 c = TREE_REAL_CST (arg1);
14737 n = real_to_integer (&c);
14740 REAL_VALUE_TYPE cint;
14741 real_from_integer (&cint, VOIDmode, n,
14742 n < 0 ? -1 : 0, 0);
14743 if (real_identical (&c, &cint))
14747 return tree_expr_nonnegative_warnv_p (arg0,
14748 strict_overflow_p);
14753 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14757 /* Return true if T is known to be non-negative. If the return
14758 value is based on the assumption that signed overflow is undefined,
14759 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14760 *STRICT_OVERFLOW_P. */
14763 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14765 enum tree_code code = TREE_CODE (t);
14766 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14773 tree temp = TARGET_EXPR_SLOT (t);
14774 t = TARGET_EXPR_INITIAL (t);
14776 /* If the initializer is non-void, then it's a normal expression
14777 that will be assigned to the slot. */
14778 if (!VOID_TYPE_P (t))
14779 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14781 /* Otherwise, the initializer sets the slot in some way. One common
14782 way is an assignment statement at the end of the initializer. */
14785 if (TREE_CODE (t) == BIND_EXPR)
14786 t = expr_last (BIND_EXPR_BODY (t));
14787 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14788 || TREE_CODE (t) == TRY_CATCH_EXPR)
14789 t = expr_last (TREE_OPERAND (t, 0));
14790 else if (TREE_CODE (t) == STATEMENT_LIST)
14795 if (TREE_CODE (t) == MODIFY_EXPR
14796 && TREE_OPERAND (t, 0) == temp)
14797 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14798 strict_overflow_p);
14805 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14806 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14808 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14809 get_callee_fndecl (t),
14812 strict_overflow_p);
14814 case COMPOUND_EXPR:
14816 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14817 strict_overflow_p);
14819 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14820 strict_overflow_p);
14822 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14823 strict_overflow_p);
14826 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14830 /* We don't know sign of `t', so be conservative and return false. */
14834 /* Return true if T is known to be non-negative. If the return
14835 value is based on the assumption that signed overflow is undefined,
14836 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14837 *STRICT_OVERFLOW_P. */
14840 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14842 enum tree_code code;
14843 if (t == error_mark_node)
14846 code = TREE_CODE (t);
14847 switch (TREE_CODE_CLASS (code))
14850 case tcc_comparison:
14851 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14853 TREE_OPERAND (t, 0),
14854 TREE_OPERAND (t, 1),
14855 strict_overflow_p);
14858 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14860 TREE_OPERAND (t, 0),
14861 strict_overflow_p);
14864 case tcc_declaration:
14865 case tcc_reference:
14866 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14874 case TRUTH_AND_EXPR:
14875 case TRUTH_OR_EXPR:
14876 case TRUTH_XOR_EXPR:
14877 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14879 TREE_OPERAND (t, 0),
14880 TREE_OPERAND (t, 1),
14881 strict_overflow_p);
14882 case TRUTH_NOT_EXPR:
14883 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14885 TREE_OPERAND (t, 0),
14886 strict_overflow_p);
14893 case WITH_SIZE_EXPR:
14895 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14898 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14902 /* Return true if `t' is known to be non-negative. Handle warnings
14903 about undefined signed overflow. */
14906 tree_expr_nonnegative_p (tree t)
14908 bool ret, strict_overflow_p;
14910 strict_overflow_p = false;
14911 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14912 if (strict_overflow_p)
14913 fold_overflow_warning (("assuming signed overflow does not occur when "
14914 "determining that expression is always "
14916 WARN_STRICT_OVERFLOW_MISC);
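/* Illustrative sketch (not part of GCC itself): a pass that wants the
   warning-aware behaviour just calls

     if (tree_expr_nonnegative_p (expr))
       ...

   For EXPR equal to ABS_EXPR <x> of a signed int X this returns true only
   under the assumption that signed overflow is undefined, and the wrapper
   above then requests the corresponding strict-overflow warning.  */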
14921 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14922 For floating point we further ensure that T is not denormal.
14923 Similar logic is present in nonzero_address in rtlanal.h.
14925 If the return value is based on the assumption that signed overflow
14926 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14927 change *STRICT_OVERFLOW_P. */
14930 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14931 bool *strict_overflow_p)
14936 return tree_expr_nonzero_warnv_p (op0,
14937 strict_overflow_p);
14941 tree inner_type = TREE_TYPE (op0);
14942 tree outer_type = type;
14944 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14945 && tree_expr_nonzero_warnv_p (op0,
14946 strict_overflow_p));
14950 case NON_LVALUE_EXPR:
14951 return tree_expr_nonzero_warnv_p (op0,
14952 strict_overflow_p);
14961 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14962 For floating point we further ensure that T is not denormal.
14963 Similar logic is present in nonzero_address in rtlanal.h.
14965 If the return value is based on the assumption that signed overflow
14966 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14967 change *STRICT_OVERFLOW_P. */
14970 tree_binary_nonzero_warnv_p (enum tree_code code,
14973 tree op1, bool *strict_overflow_p)
14975 bool sub_strict_overflow_p;
14978 case POINTER_PLUS_EXPR:
14980 if (TYPE_OVERFLOW_UNDEFINED (type))
14982 /* When negative values may be present it is hard
14983 to say anything definite. */
14984 sub_strict_overflow_p = false;
14985 if (!tree_expr_nonnegative_warnv_p (op0,
14986 &sub_strict_overflow_p)
14987 || !tree_expr_nonnegative_warnv_p (op1,
14988 &sub_strict_overflow_p))
14990 /* One of the operands must be positive and the other non-negative. */
14991 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14992 overflows, on a twos-complement machine the sum of two
14993 nonnegative numbers can never be zero. */
14994 return (tree_expr_nonzero_warnv_p (op0,
14996 || tree_expr_nonzero_warnv_p (op1,
14997 strict_overflow_p));
15002 if (TYPE_OVERFLOW_UNDEFINED (type))
15004 if (tree_expr_nonzero_warnv_p (op0,
15006 && tree_expr_nonzero_warnv_p (op1,
15007 strict_overflow_p))
15009 *strict_overflow_p = true;
15016 sub_strict_overflow_p = false;
15017 if (tree_expr_nonzero_warnv_p (op0,
15018 &sub_strict_overflow_p)
15019 && tree_expr_nonzero_warnv_p (op1,
15020 &sub_strict_overflow_p))
15022 if (sub_strict_overflow_p)
15023 *strict_overflow_p = true;
15028 sub_strict_overflow_p = false;
15029 if (tree_expr_nonzero_warnv_p (op0,
15030 &sub_strict_overflow_p))
15032 if (sub_strict_overflow_p)
15033 *strict_overflow_p = true;
15035 /* When both operands are nonzero, then MAX must be too. */
15036 if (tree_expr_nonzero_warnv_p (op1,
15037 strict_overflow_p))
15040 /* MAX where operand 0 is positive is positive. */
15041 return tree_expr_nonnegative_warnv_p (op0,
15042 strict_overflow_p);
15044 /* MAX where operand 1 is positive is positive. */
15045 else if (tree_expr_nonzero_warnv_p (op1,
15046 &sub_strict_overflow_p)
15047 && tree_expr_nonnegative_warnv_p (op1,
15048 &sub_strict_overflow_p))
15050 if (sub_strict_overflow_p)
15051 *strict_overflow_p = true;
15057 return (tree_expr_nonzero_warnv_p (op1,
15059 || tree_expr_nonzero_warnv_p (op0,
15060 strict_overflow_p));
15069 /* Return true when T is an address and is known to be nonzero.
15070 For floating point we further ensure that T is not denormal.
15071 Similar logic is present in nonzero_address in rtlanal.h.
15073 If the return value is based on the assumption that signed overflow
15074 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15075 change *STRICT_OVERFLOW_P. */
15078 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15080 bool sub_strict_overflow_p;
15081 switch (TREE_CODE (t))
15084 return !integer_zerop (t);
15088 tree base = TREE_OPERAND (t, 0);
15089 if (!DECL_P (base))
15090 base = get_base_address (base);
15095 /* Weak declarations may link to NULL. Other things may also be NULL
15096 so protect with -fdelete-null-pointer-checks; but not variables
15097 allocated on the stack. */
15099 && (flag_delete_null_pointer_checks
15100 || (DECL_CONTEXT (base)
15101 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15102 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15103 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15105 /* Constants are never weak. */
15106 if (CONSTANT_CLASS_P (base))
15113 sub_strict_overflow_p = false;
15114 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15115 &sub_strict_overflow_p)
15116 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15117 &sub_strict_overflow_p))
15119 if (sub_strict_overflow_p)
15120 *strict_overflow_p = true;
15131 /* Return true when T is an address and is known to be nonzero.
15132 For floating point we further ensure that T is not denormal.
15133 Similar logic is present in nonzero_address in rtlanal.h.
15135 If the return value is based on the assumption that signed overflow
15136 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15137 change *STRICT_OVERFLOW_P. */
15140 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15142 tree type = TREE_TYPE (t);
15143 enum tree_code code;
15145 /* Doing something useful for floating point would need more work. */
15146 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15149 code = TREE_CODE (t);
15150 switch (TREE_CODE_CLASS (code))
15153 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15154 strict_overflow_p);
15156 case tcc_comparison:
15157 return tree_binary_nonzero_warnv_p (code, type,
15158 TREE_OPERAND (t, 0),
15159 TREE_OPERAND (t, 1),
15160 strict_overflow_p);
15162 case tcc_declaration:
15163 case tcc_reference:
15164 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15172 case TRUTH_NOT_EXPR:
15173 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15174 strict_overflow_p);
15176 case TRUTH_AND_EXPR:
15177 case TRUTH_OR_EXPR:
15178 case TRUTH_XOR_EXPR:
15179 return tree_binary_nonzero_warnv_p (code, type,
15180 TREE_OPERAND (t, 0),
15181 TREE_OPERAND (t, 1),
15182 strict_overflow_p);
15189 case WITH_SIZE_EXPR:
15191 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15193 case COMPOUND_EXPR:
15196 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15197 strict_overflow_p);
15200 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15201 strict_overflow_p);
15204 return alloca_call_p (t);
15212 /* Return true when T is an address and is known to be nonzero.
15213 Handle warnings about undefined signed overflow. */
15216 tree_expr_nonzero_p (tree t)
15218 bool ret, strict_overflow_p;
15220 strict_overflow_p = false;
15221 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15222 if (strict_overflow_p)
15223 fold_overflow_warning (("assuming signed overflow does not occur when "
15224 "determining that expression is always "
15226 WARN_STRICT_OVERFLOW_MISC);
15230 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15231 attempt to fold the expression to a constant without modifying TYPE,
15234 If the expression could be simplified to a constant, then return
15235 the constant. If the expression would not be simplified to a
15236 constant, then return NULL_TREE. */
15239 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15241 tree tem = fold_binary (code, type, op0, op1);
15242 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15245 /* Given the components of a unary expression CODE, TYPE and OP0,
15246 attempt to fold the expression to a constant without modifying
15249 If the expression could be simplified to a constant, then return
15250 the constant. If the expression would not be simplified to a
15251 constant, then return NULL_TREE. */
15254 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15256 tree tem = fold_unary (code, type, op0);
15257 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15260 /* If EXP represents referencing an element in a constant string
15261 (either via pointer arithmetic or array indexing), return the
15262 tree representing the value accessed, otherwise return NULL. */
15265 fold_read_from_constant_string (tree exp)
15267 if ((TREE_CODE (exp) == INDIRECT_REF
15268 || TREE_CODE (exp) == ARRAY_REF)
15269 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15271 tree exp1 = TREE_OPERAND (exp, 0);
15274 location_t loc = EXPR_LOCATION (exp);
15276 if (TREE_CODE (exp) == INDIRECT_REF)
15277 string = string_constant (exp1, &index);
15280 tree low_bound = array_ref_low_bound (exp);
15281 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15283 /* Optimize the special case of a zero lower bound.
15285 We convert the low_bound to sizetype to avoid some problems
15286 with constant folding. (E.g. suppose the lower bound is 1,
15287 and its mode is QI. Without the conversion, (ARRAY
15288 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15289 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15290 if (! integer_zerop (low_bound))
15291 index = size_diffop_loc (loc, index,
15292 fold_convert_loc (loc, sizetype, low_bound));
15298 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15299 && TREE_CODE (string) == STRING_CST
15300 && TREE_CODE (index) == INTEGER_CST
15301 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15302 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15304 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15305 return build_int_cst_type (TREE_TYPE (exp),
15306 (TREE_STRING_POINTER (string)
15307 [TREE_INT_CST_LOW (index)]));
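/* Illustrative example (not part of the folder): for the access

     "abc"[1]

   the string constant and the index are recovered above and the whole
   reference folds to the character constant 'b'.  */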
15312 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15313 an integer constant, real, or fixed-point constant.
15315 TYPE is the type of the result. */
15318 fold_negate_const (tree arg0, tree type)
15320 tree t = NULL_TREE;
15322 switch (TREE_CODE (arg0))
15326 double_int val = tree_to_double_int (arg0);
15327 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15329 t = force_fit_type_double (type, val, 1,
15330 (overflow | TREE_OVERFLOW (arg0))
15331 && !TYPE_UNSIGNED (type));
15336 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15341 FIXED_VALUE_TYPE f;
15342 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15343 &(TREE_FIXED_CST (arg0)), NULL,
15344 TYPE_SATURATING (type));
15345 t = build_fixed (type, f);
15346 /* Propagate overflow flags. */
15347 if (overflow_p | TREE_OVERFLOW (arg0))
15348 TREE_OVERFLOW (t) = 1;
15353 gcc_unreachable ();
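/* Illustrative example (not part of the folder): negating INT_MIN in a
   signed 32-bit type wraps back to INT_MIN, so the result is built with
   TREE_OVERFLOW set, letting callers diagnose the overflow.  */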
15359 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15360 an integer constant or real constant.
15362 TYPE is the type of the result. */
15365 fold_abs_const (tree arg0, tree type)
15367 tree t = NULL_TREE;
15369 switch (TREE_CODE (arg0))
15373 double_int val = tree_to_double_int (arg0);
15375 /* If the value is unsigned or non-negative, then the absolute value
15376 is the same as the ordinary value. */
15377 if (TYPE_UNSIGNED (type)
15378 || !double_int_negative_p (val))
15381 /* If the value is negative, then the absolute value is its negation. */
15387 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15388 t = force_fit_type_double (type, val, -1,
15389 overflow | TREE_OVERFLOW (arg0));
15395 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15396 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15402 gcc_unreachable ();
15408 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15409 constant. TYPE is the type of the result. */
15412 fold_not_const (const_tree arg0, tree type)
15416 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15418 val = double_int_not (tree_to_double_int (arg0));
15419 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
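/* Illustrative example (not part of the folder): for the signed int
   constant 5 the bitwise complement folds to the constant -6; no overflow
   is possible here, only an existing TREE_OVERFLOW flag is propagated.  */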
15422 /* Given CODE, a relational operator, the target type, TYPE and two
15423 constant operands OP0 and OP1, return the result of the
15424 relational operation. If the result is not a compile time
15425 constant, then return NULL_TREE. */
15428 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15430 int result, invert;
15432 /* From here on, the only cases we handle are when the result is
15433 known to be a constant. */
15435 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15437 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15438 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15440 /* Handle the cases where either operand is a NaN. */
15441 if (real_isnan (c0) || real_isnan (c1))
15451 case UNORDERED_EXPR:
15465 if (flag_trapping_math)
15471 gcc_unreachable ();
15474 return constant_boolean_node (result, type);
15477 return constant_boolean_node (real_compare (code, c0, c1), type);
15480 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15482 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15483 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15484 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15487 /* Handle equality/inequality of complex constants. */
15488 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15490 tree rcond = fold_relational_const (code, type,
15491 TREE_REALPART (op0),
15492 TREE_REALPART (op1));
15493 tree icond = fold_relational_const (code, type,
15494 TREE_IMAGPART (op0),
15495 TREE_IMAGPART (op1));
15496 if (code == EQ_EXPR)
15497 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15498 else if (code == NE_EXPR)
15499 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15504 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15506 To compute GT, swap the arguments and do LT.
15507 To compute GE, do LT and invert the result.
15508 To compute LE, swap the arguments, do LT and invert the result.
15509 To compute NE, do EQ and invert the result.
15511 Therefore, the code below must handle only EQ and LT. */
15513 if (code == LE_EXPR || code == GT_EXPR)
15518 code = swap_tree_comparison (code);
15521 /* Note that it is safe to invert for real values here because we
15522 have already handled the one case where it matters. */
15525 if (code == NE_EXPR || code == GE_EXPR)
15528 code = invert_tree_comparison (code, false);
15531 /* Compute a result for LT or EQ if args permit;
15532 Otherwise return T. */
15533 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15535 if (code == EQ_EXPR)
15536 result = tree_int_cst_equal (op0, op1);
15537 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15538 result = INT_CST_LT_UNSIGNED (op0, op1);
15540 result = INT_CST_LT (op0, op1);
15547 return constant_boolean_node (result, type);
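/* Illustrative example (not part of the folder): comparing the integer
   constants 2 and 3 with LT_EXPR reaches the INT_CST_LT path above and
   yields constant_boolean_node (1, type), i.e. a true node of the
   requested type.  */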
15550 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15551 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR itself. */
15555 fold_build_cleanup_point_expr (tree type, tree expr)
15557 /* If the expression does not have side effects then we don't have to wrap
15558 it with a cleanup point expression. */
15559 if (!TREE_SIDE_EFFECTS (expr))
15562 /* If the expression is a return, check whether the expression inside the
15563 return, or the right-hand side of the modify expression inside the
15564 return, has side effects. If either of them has none, we don't need to
15565 wrap the expression in a cleanup point expression. Note that we don't
15566 check the left-hand side of the modify because it should always be a return decl. */
15567 if (TREE_CODE (expr) == RETURN_EXPR)
15569 tree op = TREE_OPERAND (expr, 0);
15570 if (!op || !TREE_SIDE_EFFECTS (op))
15572 op = TREE_OPERAND (op, 1);
15573 if (!TREE_SIDE_EFFECTS (op))
15577 return build1 (CLEANUP_POINT_EXPR, type, expr);
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_low_cst (op01, 0);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}
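/* Illustrative sketch (not part of the original sources): the kind of
   folding performed above, in source terms.  Dereferencing the address
   of an array at the matching element type becomes an ARRAY_REF of
   element 0, i.e. *(int *)&a turns into a[0]; dereferencing the address
   of a complex value at its element type becomes __real__ of it.
   Guarded out; for exposition only.  */
#if 0
{
  /* Assuming ARR is a VAR_DECL of type int[4] built elsewhere.  */
  tree addr = build_fold_addr_expr_loc (UNKNOWN_LOCATION, arr);
  /* Folds to the ARRAY_REF arr[0] rather than an INDIRECT_REF.  */
  tree deref = fold_indirect_ref_1 (UNKNOWN_LOCATION, integer_type_node, addr);
}
#endif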
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
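/* Illustrative sketch (not part of the original sources): when the
   result of an expression is thrown away, side-effect-free wrappers
   are peeled off.  For a COMPOUND_EXPR whose second operand has no
   side effects, only the first operand is kept.  Guarded out; for
   exposition only.  */
#if 0
{
  /* Assuming CALL is a side-effecting CALL_EXPR built elsewhere.  */
  tree cst = build_int_cst (integer_type_node, 1);
  tree pair = build2 (COMPOUND_EXPR, integer_type_node, call, cst);
  /* The trailing constant is dropped; the call itself is returned.  */
  tree kept = fold_ignored_result (pair);
}
#endif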
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          double_int val = tree_to_double_int (value);
          bool overflow_p;

          if ((val.low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val.low &= ~(divisor - 1);
          val.low += divisor;
          if (val.low == 0)
            {
              val.high++;
              if (val.high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), val,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
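/* Illustrative sketch (not part of the original sources): for a
   power-of-two divisor the constant path above clears the low bits
   and adds one divisor, so with divisor 8 the value 21 rounds to
   (21 & ~7) + 8 = 24, while a value that is already a multiple of 8
   is returned untouched.  Guarded out; for exposition only.  */
#if 0
{
  tree twenty_one = size_int (21);
  /* Folds to the INTEGER_CST 24.  */
  tree rounded = round_up_loc (UNKNOWN_LOCATION, twenty_one, 8);
}
#endif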
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
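/* Illustrative sketch (not part of the original sources): rounding
   down by a power of two only needs the mask, no addition:
   21 & -8 = 16.  Guarded out; for exposition only.  */
#if 0
{
  tree twenty_one = size_int (21);
  /* Folds to the INTEGER_CST 16.  */
  tree rounded = round_down_loc (UNKNOWN_LOCATION, twenty_one, 8);
}
#endif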
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
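/* Illustrative sketch (not part of the original sources): for an
   ADDR_EXPR such as &s.f the helper above returns the address of the
   containing object as the core, the constant bit position of the
   access in *PBITPOS, and any variable part of the offset in *POFFSET;
   anything that is not an ADDR_EXPR is its own core with a zero
   offset.  Guarded out; for exposition only.  */
#if 0
{
  /* Assuming ADDR is an ADDR_EXPR of a COMPONENT_REF built elsewhere.  */
  HOST_WIDE_INT bitpos;
  tree offset;
  tree core = split_address_to_core_and_offset (addr, &bitpos, &offset);
}
#endif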
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
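/* Illustrative sketch (not part of the original sources): two
   addresses into the same object differ by a constant number of
   bytes, so for int a[10] the difference &a[3] - &a[1] is reported as
   2 * sizeof (int) bytes in *DIFF; addresses with different bases, or
   a variable offset on only one side, make the function return false.
   Guarded out; for exposition only.  */
#if 0
{
  /* Assuming E1 and E2 are ADDR_EXPRs of a[3] and a[1] built elsewhere.  */
  HOST_WIDE_INT diff;
  if (ptr_difference_const (e1, e2, &diff))
    {
      /* diff == 2 * sizeof (int) under the assumptions above.  */
    }
}
#endif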
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);