1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide and size_binop.
32 fold takes a tree as argument and returns a simplified tree.
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'.
41 Note: Since the folders get called on non-gimple code as well as
42 gimple code, we need to handle GIMPLE tuples as well as their
43 corresponding tree equivalents. */
47 #include "coretypes.h"
56 #include "diagnostic-core.h"
60 #include "langhooks.h"
63 #include "tree-flow.h"
65 /* Nonzero if we are folding constants inside an initializer; zero
67 int folding_initializer = 0;
69 /* The following constants represent a bit based encoding of GCC's
70 comparison operators. This encoding simplifies transformations
71 on relational comparison operators, such as AND and OR. */
72 enum comparison_code {
91 static bool negate_mathfn_p (enum built_in_function);
92 static bool negate_expr_p (tree);
93 static tree negate_expr (tree);
94 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
95 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
96 static tree const_binop (enum tree_code, tree, tree);
97 static enum comparison_code comparison_to_compcode (enum tree_code);
98 static enum tree_code compcode_to_comparison (enum comparison_code);
99 static int operand_equal_for_comparison_p (tree, tree, tree);
100 static int twoval_comparison_p (tree, tree *, tree *, int *);
101 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
102 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
103 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
104 static tree make_bit_field_ref (location_t, tree, tree,
105 HOST_WIDE_INT, HOST_WIDE_INT, int);
106 static tree optimize_bit_field_compare (location_t, enum tree_code,
108 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
110 enum machine_mode *, int *, int *,
112 static int all_ones_mask_p (const_tree, int);
113 static tree sign_bit_p (tree, const_tree);
114 static int simple_operand_p (const_tree);
115 static bool simple_operand_p_2 (tree);
116 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
117 static tree range_predecessor (tree);
118 static tree range_successor (tree);
119 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
120 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
121 static tree unextend (tree, int, int, tree);
122 static tree optimize_minmax_comparison (location_t, enum tree_code,
124 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
125 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
126 static tree fold_binary_op_with_conditional_arg (location_t,
127 enum tree_code, tree,
130 static tree fold_mathfn_compare (location_t,
131 enum built_in_function, enum tree_code,
133 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
134 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
135 static bool reorder_operands_p (const_tree, const_tree);
136 static tree fold_negate_const (tree, tree);
137 static tree fold_not_const (const_tree, tree);
138 static tree fold_relational_const (enum tree_code, tree, tree, tree);
139 static tree fold_convert_const (enum tree_code, tree, tree);
141 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
142 Otherwise, return LOC. */
145 expr_location_or (tree t, location_t loc)
147 location_t tloc = EXPR_LOCATION (t);
148 return tloc != UNKNOWN_LOCATION ? tloc : loc;
151 /* Similar to protected_set_expr_location, but never modify x in place,
152 if location can and needs to be set, unshare it. */
155 protected_set_expr_location_unshare (tree x, location_t loc)
157 if (CAN_HAVE_LOCATION_P (x)
158 && EXPR_LOCATION (x) != loc
159 && !(TREE_CODE (x) == SAVE_EXPR
160 || TREE_CODE (x) == TARGET_EXPR
161 || TREE_CODE (x) == BIND_EXPR))
164 SET_EXPR_LOCATION (x, loc);
170 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
171 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
172 and SUM1. Then this yields nonzero if overflow occurred during the
175 Overflow occurs if A and B have the same sign, but A and SUM differ in
176 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
178 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
180 /* If ARG2 divides ARG1 with zero remainder, carries out the division
181 of type CODE and returns the quotient.
182 Otherwise returns NULL_TREE. */
185 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
190 /* The sign of the division is according to operand two, that
191 does the correct thing for POINTER_PLUS_EXPR where we want
192 a signed division. */
193 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
194 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
195 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
198 quo = double_int_divmod (tree_to_double_int (arg1),
199 tree_to_double_int (arg2),
202 if (double_int_zero_p (rem))
203 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
208 /* This is nonzero if we should defer warnings about undefined
209 overflow. This facility exists because these warnings are a
210 special case. The code to estimate loop iterations does not want
211 to issue any warnings, since it works with expressions which do not
212 occur in user code. Various bits of cleanup code call fold(), but
213 only use the result if it has certain characteristics (e.g., is a
214 constant); that code only wants to issue a warning if the result is
217 static int fold_deferring_overflow_warnings;
219 /* If a warning about undefined overflow is deferred, this is the
220 warning. Note that this may cause us to turn two warnings into
221 one, but that is fine since it is sufficient to only give one
222 warning per expression. */
224 static const char* fold_deferred_overflow_warning;
226 /* If a warning about undefined overflow is deferred, this is the
227 level at which the warning should be emitted. */
229 static enum warn_strict_overflow_code fold_deferred_overflow_code;
231 /* Start deferring overflow warnings. We could use a stack here to
232 permit nested calls, but at present it is not necessary. */
235 fold_defer_overflow_warnings (void)
237 ++fold_deferring_overflow_warnings;
240 /* Stop deferring overflow warnings. If there is a pending warning,
241 and ISSUE is true, then issue the warning if appropriate. STMT is
242 the statement with which the warning should be associated (used for
243 location information); STMT may be NULL. CODE is the level of the
244 warning--a warn_strict_overflow_code value. This function will use
245 the smaller of CODE and the deferred code when deciding whether to
246 issue the warning. CODE may be zero to mean to always use the
250 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
255 gcc_assert (fold_deferring_overflow_warnings > 0);
256 --fold_deferring_overflow_warnings;
257 if (fold_deferring_overflow_warnings > 0)
259 if (fold_deferred_overflow_warning != NULL
261 && code < (int) fold_deferred_overflow_code)
262 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
266 warnmsg = fold_deferred_overflow_warning;
267 fold_deferred_overflow_warning = NULL;
269 if (!issue || warnmsg == NULL)
272 if (gimple_no_warning_p (stmt))
275 /* Use the smallest code level when deciding to issue the
277 if (code == 0 || code > (int) fold_deferred_overflow_code)
278 code = fold_deferred_overflow_code;
280 if (!issue_strict_overflow_warning (code))
284 locus = input_location;
286 locus = gimple_location (stmt);
287 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
290 /* Stop deferring overflow warnings, ignoring any deferred
294 fold_undefer_and_ignore_overflow_warnings (void)
296 fold_undefer_overflow_warnings (false, NULL, 0);
299 /* Whether we are deferring overflow warnings. */
302 fold_deferring_overflow_warnings_p (void)
304 return fold_deferring_overflow_warnings > 0;
307 /* This is called when we fold something based on the fact that signed
308 overflow is undefined. */
311 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
313 if (fold_deferring_overflow_warnings > 0)
315 if (fold_deferred_overflow_warning == NULL
316 || wc < fold_deferred_overflow_code)
318 fold_deferred_overflow_warning = gmsgid;
319 fold_deferred_overflow_code = wc;
322 else if (issue_strict_overflow_warning (wc))
323 warning (OPT_Wstrict_overflow, gmsgid);
326 /* Return true if the built-in mathematical function specified by CODE
327 is odd, i.e. -f(x) == f(-x). */
330 negate_mathfn_p (enum built_in_function code)
334 CASE_FLT_FN (BUILT_IN_ASIN):
335 CASE_FLT_FN (BUILT_IN_ASINH):
336 CASE_FLT_FN (BUILT_IN_ATAN):
337 CASE_FLT_FN (BUILT_IN_ATANH):
338 CASE_FLT_FN (BUILT_IN_CASIN):
339 CASE_FLT_FN (BUILT_IN_CASINH):
340 CASE_FLT_FN (BUILT_IN_CATAN):
341 CASE_FLT_FN (BUILT_IN_CATANH):
342 CASE_FLT_FN (BUILT_IN_CBRT):
343 CASE_FLT_FN (BUILT_IN_CPROJ):
344 CASE_FLT_FN (BUILT_IN_CSIN):
345 CASE_FLT_FN (BUILT_IN_CSINH):
346 CASE_FLT_FN (BUILT_IN_CTAN):
347 CASE_FLT_FN (BUILT_IN_CTANH):
348 CASE_FLT_FN (BUILT_IN_ERF):
349 CASE_FLT_FN (BUILT_IN_LLROUND):
350 CASE_FLT_FN (BUILT_IN_LROUND):
351 CASE_FLT_FN (BUILT_IN_ROUND):
352 CASE_FLT_FN (BUILT_IN_SIN):
353 CASE_FLT_FN (BUILT_IN_SINH):
354 CASE_FLT_FN (BUILT_IN_TAN):
355 CASE_FLT_FN (BUILT_IN_TANH):
356 CASE_FLT_FN (BUILT_IN_TRUNC):
359 CASE_FLT_FN (BUILT_IN_LLRINT):
360 CASE_FLT_FN (BUILT_IN_LRINT):
361 CASE_FLT_FN (BUILT_IN_NEARBYINT):
362 CASE_FLT_FN (BUILT_IN_RINT):
363 return !flag_rounding_math;
371 /* Check whether we may negate an integer constant T without causing
375 may_negate_without_overflow_p (const_tree t)
377 unsigned HOST_WIDE_INT val;
381 gcc_assert (TREE_CODE (t) == INTEGER_CST);
383 type = TREE_TYPE (t);
384 if (TYPE_UNSIGNED (type))
387 prec = TYPE_PRECISION (type);
388 if (prec > HOST_BITS_PER_WIDE_INT)
390 if (TREE_INT_CST_LOW (t) != 0)
392 prec -= HOST_BITS_PER_WIDE_INT;
393 val = TREE_INT_CST_HIGH (t);
396 val = TREE_INT_CST_LOW (t);
397 if (prec < HOST_BITS_PER_WIDE_INT)
398 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
399 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
402 /* Determine whether an expression T can be cheaply negated using
403 the function negate_expr without introducing undefined overflow. */
406 negate_expr_p (tree t)
413 type = TREE_TYPE (t);
416 switch (TREE_CODE (t))
419 if (TYPE_OVERFLOW_WRAPS (type))
422 /* Check that -CST will not overflow type. */
423 return may_negate_without_overflow_p (t);
425 return (INTEGRAL_TYPE_P (type)
426 && TYPE_OVERFLOW_WRAPS (type));
433 /* We want to canonicalize to positive real constants. Pretend
434 that only negative ones can be easily negated. */
435 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
438 return negate_expr_p (TREE_REALPART (t))
439 && negate_expr_p (TREE_IMAGPART (t));
442 return negate_expr_p (TREE_OPERAND (t, 0))
443 && negate_expr_p (TREE_OPERAND (t, 1));
446 return negate_expr_p (TREE_OPERAND (t, 0));
449 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
450 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
452 /* -(A + B) -> (-B) - A. */
453 if (negate_expr_p (TREE_OPERAND (t, 1))
454 && reorder_operands_p (TREE_OPERAND (t, 0),
455 TREE_OPERAND (t, 1)))
457 /* -(A + B) -> (-A) - B. */
458 return negate_expr_p (TREE_OPERAND (t, 0));
461 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
462 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
463 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
464 && reorder_operands_p (TREE_OPERAND (t, 0),
465 TREE_OPERAND (t, 1));
468 if (TYPE_UNSIGNED (TREE_TYPE (t)))
474 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
475 return negate_expr_p (TREE_OPERAND (t, 1))
476 || negate_expr_p (TREE_OPERAND (t, 0));
484 /* In general we can't negate A / B, because if A is INT_MIN and
485 B is 1, we may turn this into INT_MIN / -1 which is undefined
486 and actually traps on some architectures. But if overflow is
487 undefined, we can negate, because - (INT_MIN / 1) is an
489 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
490 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
492 return negate_expr_p (TREE_OPERAND (t, 1))
493 || negate_expr_p (TREE_OPERAND (t, 0));
496 /* Negate -((double)float) as (double)(-float). */
497 if (TREE_CODE (type) == REAL_TYPE)
499 tree tem = strip_float_extensions (t);
501 return negate_expr_p (tem);
506 /* Negate -f(x) as f(-x). */
507 if (negate_mathfn_p (builtin_mathfn_code (t)))
508 return negate_expr_p (CALL_EXPR_ARG (t, 0));
512 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
513 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
515 tree op1 = TREE_OPERAND (t, 1);
516 if (TREE_INT_CST_HIGH (op1) == 0
517 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
518 == TREE_INT_CST_LOW (op1))
529 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
530 simplification is possible.
531 If negate_expr_p would return true for T, NULL_TREE will never be
535 fold_negate_expr (location_t loc, tree t)
537 tree type = TREE_TYPE (t);
540 switch (TREE_CODE (t))
542 /* Convert - (~A) to A + 1. */
544 if (INTEGRAL_TYPE_P (type))
545 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
546 build_int_cst (type, 1));
550 tem = fold_negate_const (t, type);
551 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
552 || !TYPE_OVERFLOW_TRAPS (type))
557 tem = fold_negate_const (t, type);
558 /* Two's complement FP formats, such as c4x, may overflow. */
559 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
564 tem = fold_negate_const (t, type);
569 tree rpart = negate_expr (TREE_REALPART (t));
570 tree ipart = negate_expr (TREE_IMAGPART (t));
572 if ((TREE_CODE (rpart) == REAL_CST
573 && TREE_CODE (ipart) == REAL_CST)
574 || (TREE_CODE (rpart) == INTEGER_CST
575 && TREE_CODE (ipart) == INTEGER_CST))
576 return build_complex (type, rpart, ipart);
581 if (negate_expr_p (t))
582 return fold_build2_loc (loc, COMPLEX_EXPR, type,
583 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
584 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
588 if (negate_expr_p (t))
589 return fold_build1_loc (loc, CONJ_EXPR, type,
590 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
594 return TREE_OPERAND (t, 0);
597 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
598 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
600 /* -(A + B) -> (-B) - A. */
601 if (negate_expr_p (TREE_OPERAND (t, 1))
602 && reorder_operands_p (TREE_OPERAND (t, 0),
603 TREE_OPERAND (t, 1)))
605 tem = negate_expr (TREE_OPERAND (t, 1));
606 return fold_build2_loc (loc, MINUS_EXPR, type,
607 tem, TREE_OPERAND (t, 0));
610 /* -(A + B) -> (-A) - B. */
611 if (negate_expr_p (TREE_OPERAND (t, 0)))
613 tem = negate_expr (TREE_OPERAND (t, 0));
614 return fold_build2_loc (loc, MINUS_EXPR, type,
615 tem, TREE_OPERAND (t, 1));
621 /* - (A - B) -> B - A */
622 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
623 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
624 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
625 return fold_build2_loc (loc, MINUS_EXPR, type,
626 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
630 if (TYPE_UNSIGNED (type))
636 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
638 tem = TREE_OPERAND (t, 1);
639 if (negate_expr_p (tem))
640 return fold_build2_loc (loc, TREE_CODE (t), type,
641 TREE_OPERAND (t, 0), negate_expr (tem));
642 tem = TREE_OPERAND (t, 0);
643 if (negate_expr_p (tem))
644 return fold_build2_loc (loc, TREE_CODE (t), type,
645 negate_expr (tem), TREE_OPERAND (t, 1));
654 /* In general we can't negate A / B, because if A is INT_MIN and
655 B is 1, we may turn this into INT_MIN / -1 which is undefined
656 and actually traps on some architectures. But if overflow is
657 undefined, we can negate, because - (INT_MIN / 1) is an
659 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
661 const char * const warnmsg = G_("assuming signed overflow does not "
662 "occur when negating a division");
663 tem = TREE_OPERAND (t, 1);
664 if (negate_expr_p (tem))
666 if (INTEGRAL_TYPE_P (type)
667 && (TREE_CODE (tem) != INTEGER_CST
668 || integer_onep (tem)))
669 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
670 return fold_build2_loc (loc, TREE_CODE (t), type,
671 TREE_OPERAND (t, 0), negate_expr (tem));
673 tem = TREE_OPERAND (t, 0);
674 if (negate_expr_p (tem))
676 if (INTEGRAL_TYPE_P (type)
677 && (TREE_CODE (tem) != INTEGER_CST
678 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
679 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
680 return fold_build2_loc (loc, TREE_CODE (t), type,
681 negate_expr (tem), TREE_OPERAND (t, 1));
687 /* Convert -((double)float) into (double)(-float). */
688 if (TREE_CODE (type) == REAL_TYPE)
690 tem = strip_float_extensions (t);
691 if (tem != t && negate_expr_p (tem))
692 return fold_convert_loc (loc, type, negate_expr (tem));
697 /* Negate -f(x) as f(-x). */
698 if (negate_mathfn_p (builtin_mathfn_code (t))
699 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
703 fndecl = get_callee_fndecl (t);
704 arg = negate_expr (CALL_EXPR_ARG (t, 0));
705 return build_call_expr_loc (loc, fndecl, 1, arg);
710 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
711 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
713 tree op1 = TREE_OPERAND (t, 1);
714 if (TREE_INT_CST_HIGH (op1) == 0
715 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
716 == TREE_INT_CST_LOW (op1))
718 tree ntype = TYPE_UNSIGNED (type)
719 ? signed_type_for (type)
720 : unsigned_type_for (type);
721 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
722 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
723 return fold_convert_loc (loc, type, temp);
735 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
736 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
748 loc = EXPR_LOCATION (t);
749 type = TREE_TYPE (t);
752 tem = fold_negate_expr (loc, t);
754 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
755 return fold_convert_loc (loc, type, tem);
758 /* Split a tree IN into a constant, literal and variable parts that could be
759 combined with CODE to make IN. "constant" means an expression with
760 TREE_CONSTANT but that isn't an actual constant. CODE must be a
761 commutative arithmetic operation. Store the constant part into *CONP,
762 the literal in *LITP and return the variable part. If a part isn't
763 present, set it to null. If the tree does not decompose in this way,
764 return the entire tree as the variable part and the other parts as null.
766 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
767 case, we negate an operand that was subtracted. Except if it is a
768 literal for which we use *MINUS_LITP instead.
770 If NEGATE_P is true, we are negating all of IN, again except a literal
771 for which we use *MINUS_LITP instead.
773 If IN is itself a literal or constant, return it as appropriate.
775 Note that we do not guarantee that any of the three values will be the
776 same type as IN, but they will have the same signedness and mode. */
779 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
780 tree *minus_litp, int negate_p)
788 /* Strip any conversions that don't change the machine mode or signedness. */
789 STRIP_SIGN_NOPS (in);
791 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
792 || TREE_CODE (in) == FIXED_CST)
794 else if (TREE_CODE (in) == code
795 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
796 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
797 /* We can associate addition and subtraction together (even
798 though the C standard doesn't say so) for integers because
799 the value is not affected. For reals, the value might be
800 affected, so we can't. */
801 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
802 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
804 tree op0 = TREE_OPERAND (in, 0);
805 tree op1 = TREE_OPERAND (in, 1);
806 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
807 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
809 /* First see if either of the operands is a literal, then a constant. */
810 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
811 || TREE_CODE (op0) == FIXED_CST)
812 *litp = op0, op0 = 0;
813 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
814 || TREE_CODE (op1) == FIXED_CST)
815 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
817 if (op0 != 0 && TREE_CONSTANT (op0))
818 *conp = op0, op0 = 0;
819 else if (op1 != 0 && TREE_CONSTANT (op1))
820 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
822 /* If we haven't dealt with either operand, this is not a case we can
823 decompose. Otherwise, VAR is either of the ones remaining, if any. */
824 if (op0 != 0 && op1 != 0)
829 var = op1, neg_var_p = neg1_p;
831 /* Now do any needed negations. */
833 *minus_litp = *litp, *litp = 0;
835 *conp = negate_expr (*conp);
837 var = negate_expr (var);
839 else if (TREE_CONSTANT (in))
847 *minus_litp = *litp, *litp = 0;
848 else if (*minus_litp)
849 *litp = *minus_litp, *minus_litp = 0;
850 *conp = negate_expr (*conp);
851 var = negate_expr (var);
857 /* Re-associate trees split by the above function. T1 and T2 are
858 either expressions to associate or null. Return the new
859 expression, if any. LOC is the location of the new expression. If
860 we build an operation, do it in TYPE and with CODE. */
863 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
870 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
871 try to fold this since we will have infinite recursion. But do
872 deal with any NEGATE_EXPRs. */
873 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
874 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
876 if (code == PLUS_EXPR)
878 if (TREE_CODE (t1) == NEGATE_EXPR)
879 return build2_loc (loc, MINUS_EXPR, type,
880 fold_convert_loc (loc, type, t2),
881 fold_convert_loc (loc, type,
882 TREE_OPERAND (t1, 0)));
883 else if (TREE_CODE (t2) == NEGATE_EXPR)
884 return build2_loc (loc, MINUS_EXPR, type,
885 fold_convert_loc (loc, type, t1),
886 fold_convert_loc (loc, type,
887 TREE_OPERAND (t2, 0)));
888 else if (integer_zerop (t2))
889 return fold_convert_loc (loc, type, t1);
891 else if (code == MINUS_EXPR)
893 if (integer_zerop (t2))
894 return fold_convert_loc (loc, type, t1);
897 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
898 fold_convert_loc (loc, type, t2));
901 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
902 fold_convert_loc (loc, type, t2));
905 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
906 for use in int_const_binop, size_binop and size_diffop. */
909 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
911 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
913 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
928 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
929 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
930 && TYPE_MODE (type1) == TYPE_MODE (type2);
934 /* Combine two integer constants ARG1 and ARG2 under operation CODE
935 to produce a new constant. Return NULL_TREE if we don't know how
936 to evaluate CODE at compile-time. */
939 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
941 double_int op1, op2, res, tmp;
943 tree type = TREE_TYPE (arg1);
944 bool uns = TYPE_UNSIGNED (type);
946 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
947 bool overflow = false;
949 op1 = tree_to_double_int (arg1);
950 op2 = tree_to_double_int (arg2);
955 res = double_int_ior (op1, op2);
959 res = double_int_xor (op1, op2);
963 res = double_int_and (op1, op2);
967 res = double_int_rshift (op1, double_int_to_shwi (op2),
968 TYPE_PRECISION (type), !uns);
972 /* It's unclear from the C standard whether shifts can overflow.
973 The following code ignores overflow; perhaps a C standard
974 interpretation ruling is needed. */
975 res = double_int_lshift (op1, double_int_to_shwi (op2),
976 TYPE_PRECISION (type), !uns);
980 res = double_int_rrotate (op1, double_int_to_shwi (op2),
981 TYPE_PRECISION (type));
985 res = double_int_lrotate (op1, double_int_to_shwi (op2),
986 TYPE_PRECISION (type));
990 overflow = add_double (op1.low, op1.high, op2.low, op2.high,
991 &res.low, &res.high);
995 neg_double (op2.low, op2.high, &res.low, &res.high);
996 add_double (op1.low, op1.high, res.low, res.high,
997 &res.low, &res.high);
998 overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
1002 overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
1003 &res.low, &res.high);
1006 case TRUNC_DIV_EXPR:
1007 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1008 case EXACT_DIV_EXPR:
1009 /* This is a shortcut for a common special case. */
1010 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1011 && !TREE_OVERFLOW (arg1)
1012 && !TREE_OVERFLOW (arg2)
1013 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1015 if (code == CEIL_DIV_EXPR)
1016 op1.low += op2.low - 1;
1018 res.low = op1.low / op2.low, res.high = 0;
1022 /* ... fall through ... */
1024 case ROUND_DIV_EXPR:
1025 if (double_int_zero_p (op2))
1027 if (double_int_one_p (op2))
1032 if (double_int_equal_p (op1, op2)
1033 && ! double_int_zero_p (op1))
1035 res = double_int_one;
1038 overflow = div_and_round_double (code, uns,
1039 op1.low, op1.high, op2.low, op2.high,
1040 &res.low, &res.high,
1041 &tmp.low, &tmp.high);
1044 case TRUNC_MOD_EXPR:
1045 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1046 /* This is a shortcut for a common special case. */
1047 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1048 && !TREE_OVERFLOW (arg1)
1049 && !TREE_OVERFLOW (arg2)
1050 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1052 if (code == CEIL_MOD_EXPR)
1053 op1.low += op2.low - 1;
1054 res.low = op1.low % op2.low, res.high = 0;
1058 /* ... fall through ... */
1060 case ROUND_MOD_EXPR:
1061 if (double_int_zero_p (op2))
1063 overflow = div_and_round_double (code, uns,
1064 op1.low, op1.high, op2.low, op2.high,
1065 &tmp.low, &tmp.high,
1066 &res.low, &res.high);
1070 res = double_int_min (op1, op2, uns);
1074 res = double_int_max (op1, op2, uns);
1081 t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
1082 ((!uns || is_sizetype) && overflow)
1083 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1088 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1089 constant. We assume ARG1 and ARG2 have the same data type, or at least
1090 are the same kind of constant and the same machine mode. Return zero if
1091 combining the constants is not allowed in the current operating mode. */
1094 const_binop (enum tree_code code, tree arg1, tree arg2)
1096 /* Sanity check for the recursive cases. */
1103 if (TREE_CODE (arg1) == INTEGER_CST)
1104 return int_const_binop (code, arg1, arg2);
1106 if (TREE_CODE (arg1) == REAL_CST)
1108 enum machine_mode mode;
1111 REAL_VALUE_TYPE value;
1112 REAL_VALUE_TYPE result;
1116 /* The following codes are handled by real_arithmetic. */
1131 d1 = TREE_REAL_CST (arg1);
1132 d2 = TREE_REAL_CST (arg2);
1134 type = TREE_TYPE (arg1);
1135 mode = TYPE_MODE (type);
1137 /* Don't perform operation if we honor signaling NaNs and
1138 either operand is a NaN. */
1139 if (HONOR_SNANS (mode)
1140 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1143 /* Don't perform operation if it would raise a division
1144 by zero exception. */
1145 if (code == RDIV_EXPR
1146 && REAL_VALUES_EQUAL (d2, dconst0)
1147 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1150 /* If either operand is a NaN, just return it. Otherwise, set up
1151 for floating-point trap; we return an overflow. */
1152 if (REAL_VALUE_ISNAN (d1))
1154 else if (REAL_VALUE_ISNAN (d2))
1157 inexact = real_arithmetic (&value, code, &d1, &d2);
1158 real_convert (&result, mode, &value);
1160 /* Don't constant fold this floating point operation if
1161 the result has overflowed and flag_trapping_math. */
1162 if (flag_trapping_math
1163 && MODE_HAS_INFINITIES (mode)
1164 && REAL_VALUE_ISINF (result)
1165 && !REAL_VALUE_ISINF (d1)
1166 && !REAL_VALUE_ISINF (d2))
1169 /* Don't constant fold this floating point operation if the
1170 result may dependent upon the run-time rounding mode and
1171 flag_rounding_math is set, or if GCC's software emulation
1172 is unable to accurately represent the result. */
1173 if ((flag_rounding_math
1174 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1175 && (inexact || !real_identical (&result, &value)))
1178 t = build_real (type, result);
1180 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1184 if (TREE_CODE (arg1) == FIXED_CST)
1186 FIXED_VALUE_TYPE f1;
1187 FIXED_VALUE_TYPE f2;
1188 FIXED_VALUE_TYPE result;
1193 /* The following codes are handled by fixed_arithmetic. */
1199 case TRUNC_DIV_EXPR:
1200 f2 = TREE_FIXED_CST (arg2);
1205 f2.data.high = TREE_INT_CST_HIGH (arg2);
1206 f2.data.low = TREE_INT_CST_LOW (arg2);
1214 f1 = TREE_FIXED_CST (arg1);
1215 type = TREE_TYPE (arg1);
1216 sat_p = TYPE_SATURATING (type);
1217 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1218 t = build_fixed (type, result);
1219 /* Propagate overflow flags. */
1220 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1221 TREE_OVERFLOW (t) = 1;
1225 if (TREE_CODE (arg1) == COMPLEX_CST)
1227 tree type = TREE_TYPE (arg1);
1228 tree r1 = TREE_REALPART (arg1);
1229 tree i1 = TREE_IMAGPART (arg1);
1230 tree r2 = TREE_REALPART (arg2);
1231 tree i2 = TREE_IMAGPART (arg2);
1238 real = const_binop (code, r1, r2);
1239 imag = const_binop (code, i1, i2);
1243 if (COMPLEX_FLOAT_TYPE_P (type))
1244 return do_mpc_arg2 (arg1, arg2, type,
1245 /* do_nonfinite= */ folding_initializer,
1248 real = const_binop (MINUS_EXPR,
1249 const_binop (MULT_EXPR, r1, r2),
1250 const_binop (MULT_EXPR, i1, i2));
1251 imag = const_binop (PLUS_EXPR,
1252 const_binop (MULT_EXPR, r1, i2),
1253 const_binop (MULT_EXPR, i1, r2));
1257 if (COMPLEX_FLOAT_TYPE_P (type))
1258 return do_mpc_arg2 (arg1, arg2, type,
1259 /* do_nonfinite= */ folding_initializer,
1262 case TRUNC_DIV_EXPR:
1264 case FLOOR_DIV_EXPR:
1265 case ROUND_DIV_EXPR:
1266 if (flag_complex_method == 0)
1268 /* Keep this algorithm in sync with
1269 tree-complex.c:expand_complex_div_straight().
1271 Expand complex division to scalars, straightforward algorithm.
1272 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1276 = const_binop (PLUS_EXPR,
1277 const_binop (MULT_EXPR, r2, r2),
1278 const_binop (MULT_EXPR, i2, i2));
1280 = const_binop (PLUS_EXPR,
1281 const_binop (MULT_EXPR, r1, r2),
1282 const_binop (MULT_EXPR, i1, i2));
1284 = const_binop (MINUS_EXPR,
1285 const_binop (MULT_EXPR, i1, r2),
1286 const_binop (MULT_EXPR, r1, i2));
1288 real = const_binop (code, t1, magsquared);
1289 imag = const_binop (code, t2, magsquared);
1293 /* Keep this algorithm in sync with
1294 tree-complex.c:expand_complex_div_wide().
1296 Expand complex division to scalars, modified algorithm to minimize
1297 overflow with wide input ranges. */
1298 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1299 fold_abs_const (r2, TREE_TYPE (type)),
1300 fold_abs_const (i2, TREE_TYPE (type)));
1302 if (integer_nonzerop (compare))
1304 /* In the TRUE branch, we compute
1306 div = (br * ratio) + bi;
1307 tr = (ar * ratio) + ai;
1308 ti = (ai * ratio) - ar;
1311 tree ratio = const_binop (code, r2, i2);
1312 tree div = const_binop (PLUS_EXPR, i2,
1313 const_binop (MULT_EXPR, r2, ratio));
1314 real = const_binop (MULT_EXPR, r1, ratio);
1315 real = const_binop (PLUS_EXPR, real, i1);
1316 real = const_binop (code, real, div);
1318 imag = const_binop (MULT_EXPR, i1, ratio);
1319 imag = const_binop (MINUS_EXPR, imag, r1);
1320 imag = const_binop (code, imag, div);
1324 /* In the FALSE branch, we compute
1326 divisor = (d * ratio) + c;
1327 tr = (b * ratio) + a;
1328 ti = b - (a * ratio);
1331 tree ratio = const_binop (code, i2, r2);
1332 tree div = const_binop (PLUS_EXPR, r2,
1333 const_binop (MULT_EXPR, i2, ratio));
1335 real = const_binop (MULT_EXPR, i1, ratio);
1336 real = const_binop (PLUS_EXPR, real, r1);
1337 real = const_binop (code, real, div);
1339 imag = const_binop (MULT_EXPR, r1, ratio);
1340 imag = const_binop (MINUS_EXPR, i1, imag);
1341 imag = const_binop (code, imag, div);
1351 return build_complex (type, real, imag);
1354 if (TREE_CODE (arg1) == VECTOR_CST)
1356 tree type = TREE_TYPE(arg1);
1357 int count = TYPE_VECTOR_SUBPARTS (type), i;
1358 tree elements1, elements2, list = NULL_TREE;
1360 if(TREE_CODE(arg2) != VECTOR_CST)
1363 elements1 = TREE_VECTOR_CST_ELTS (arg1);
1364 elements2 = TREE_VECTOR_CST_ELTS (arg2);
1366 for (i = 0; i < count; i++)
1368 tree elem1, elem2, elem;
1370 /* The trailing elements can be empty and should be treated as 0 */
1372 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1375 elem1 = TREE_VALUE(elements1);
1376 elements1 = TREE_CHAIN (elements1);
1380 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1383 elem2 = TREE_VALUE(elements2);
1384 elements2 = TREE_CHAIN (elements2);
1387 elem = const_binop (code, elem1, elem2);
1389 /* It is possible that const_binop cannot handle the given
1390 code and return NULL_TREE */
1391 if(elem == NULL_TREE)
1394 list = tree_cons (NULL_TREE, elem, list);
1396 return build_vector(type, nreverse(list));
1401 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1402 indicates which particular sizetype to create. */
1405 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1407 return build_int_cst (sizetype_tab[(int) kind], number);
1410 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1411 is a tree code. The type of the result is taken from the operands.
1412 Both must be equivalent integer types, ala int_binop_types_match_p.
1413 If the operands are constant, so is the result. */
1416 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1418 tree type = TREE_TYPE (arg0);
1420 if (arg0 == error_mark_node || arg1 == error_mark_node)
1421 return error_mark_node;
1423 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1426 /* Handle the special case of two integer constants faster. */
1427 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1429 /* And some specific cases even faster than that. */
1430 if (code == PLUS_EXPR)
1432 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1434 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1437 else if (code == MINUS_EXPR)
1439 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1442 else if (code == MULT_EXPR)
1444 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1448 /* Handle general case of two integer constants. */
1449 return int_const_binop (code, arg0, arg1);
1452 return fold_build2_loc (loc, code, type, arg0, arg1);
1455 /* Given two values, either both of sizetype or both of bitsizetype,
1456 compute the difference between the two values. Return the value
1457 in signed type corresponding to the type of the operands. */
1460 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1462 tree type = TREE_TYPE (arg0);
1465 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1468 /* If the type is already signed, just do the simple thing. */
1469 if (!TYPE_UNSIGNED (type))
1470 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1472 if (type == sizetype)
1474 else if (type == bitsizetype)
1475 ctype = sbitsizetype;
1477 ctype = signed_type_for (type);
1479 /* If either operand is not a constant, do the conversions to the signed
1480 type and subtract. The hardware will do the right thing with any
1481 overflow in the subtraction. */
1482 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1483 return size_binop_loc (loc, MINUS_EXPR,
1484 fold_convert_loc (loc, ctype, arg0),
1485 fold_convert_loc (loc, ctype, arg1));
1487 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1488 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1489 overflow) and negate (which can't either). Special-case a result
1490 of zero while we're here. */
1491 if (tree_int_cst_equal (arg0, arg1))
1492 return build_int_cst (ctype, 0);
1493 else if (tree_int_cst_lt (arg1, arg0))
1494 return fold_convert_loc (loc, ctype,
1495 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1497 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1498 fold_convert_loc (loc, ctype,
1499 size_binop_loc (loc,
1504 /* A subroutine of fold_convert_const handling conversions of an
1505 INTEGER_CST to another integer type. */
1508 fold_convert_const_int_from_int (tree type, const_tree arg1)
1512 /* Given an integer constant, make new constant with new type,
1513 appropriately sign-extended or truncated. */
1514 t = force_fit_type_double (type, tree_to_double_int (arg1),
1515 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1516 (TREE_INT_CST_HIGH (arg1) < 0
1517 && (TYPE_UNSIGNED (type)
1518 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1519 | TREE_OVERFLOW (arg1));
1524 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1525 to an integer type. */
1528 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1533 /* The following code implements the floating point to integer
1534 conversion rules required by the Java Language Specification,
1535 that IEEE NaNs are mapped to zero and values that overflow
1536 the target precision saturate, i.e. values greater than
1537 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1538 are mapped to INT_MIN. These semantics are allowed by the
1539 C and C++ standards that simply state that the behavior of
1540 FP-to-integer conversion is unspecified upon overflow. */
1544 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1548 case FIX_TRUNC_EXPR:
1549 real_trunc (&r, VOIDmode, &x);
1556 /* If R is NaN, return zero and show we have an overflow. */
1557 if (REAL_VALUE_ISNAN (r))
1560 val = double_int_zero;
1563 /* See if R is less than the lower bound or greater than the
1568 tree lt = TYPE_MIN_VALUE (type);
1569 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1570 if (REAL_VALUES_LESS (r, l))
1573 val = tree_to_double_int (lt);
1579 tree ut = TYPE_MAX_VALUE (type);
1582 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1583 if (REAL_VALUES_LESS (u, r))
1586 val = tree_to_double_int (ut);
1592 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1594 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1598 /* A subroutine of fold_convert_const handling conversions of a
1599 FIXED_CST to an integer type. */
1602 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1605 double_int temp, temp_trunc;
1608 /* Right shift FIXED_CST to temp by fbit. */
1609 temp = TREE_FIXED_CST (arg1).data;
1610 mode = TREE_FIXED_CST (arg1).mode;
1611 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
1613 temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
1614 HOST_BITS_PER_DOUBLE_INT,
1615 SIGNED_FIXED_POINT_MODE_P (mode));
1617 /* Left shift temp to temp_trunc by fbit. */
1618 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
1619 HOST_BITS_PER_DOUBLE_INT,
1620 SIGNED_FIXED_POINT_MODE_P (mode));
1624 temp = double_int_zero;
1625 temp_trunc = double_int_zero;
1628 /* If FIXED_CST is negative, we need to round the value toward 0.
1629 By checking if the fractional bits are not zero to add 1 to temp. */
1630 if (SIGNED_FIXED_POINT_MODE_P (mode)
1631 && double_int_negative_p (temp_trunc)
1632 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
1633 temp = double_int_add (temp, double_int_one);
1635 /* Given a fixed-point constant, make new constant with new type,
1636 appropriately sign-extended or truncated. */
1637 t = force_fit_type_double (type, temp, -1,
1638 (double_int_negative_p (temp)
1639 && (TYPE_UNSIGNED (type)
1640 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1641 | TREE_OVERFLOW (arg1));
1646 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1647 to another floating point type. */
1650 fold_convert_const_real_from_real (tree type, const_tree arg1)
1652 REAL_VALUE_TYPE value;
1655 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1656 t = build_real (type, value);
1658 /* If converting an infinity or NAN to a representation that doesn't
1659 have one, set the overflow bit so that we can produce some kind of
1660 error message at the appropriate point if necessary. It's not the
1661 most user-friendly message, but it's better than nothing. */
1662 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1663 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1664 TREE_OVERFLOW (t) = 1;
1665 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1666 && !MODE_HAS_NANS (TYPE_MODE (type)))
1667 TREE_OVERFLOW (t) = 1;
1668 /* Regular overflow, conversion produced an infinity in a mode that
1669 can't represent them. */
1670 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1671 && REAL_VALUE_ISINF (value)
1672 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1673 TREE_OVERFLOW (t) = 1;
1675 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1679 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1680 to a floating point type. */
1683 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1685 REAL_VALUE_TYPE value;
1688 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1689 t = build_real (type, value);
1691 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1695 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1696 to another fixed-point type. */
1699 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1701 FIXED_VALUE_TYPE value;
1705 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1706 TYPE_SATURATING (type));
1707 t = build_fixed (type, value);
1709 /* Propagate overflow flags. */
1710 if (overflow_p | TREE_OVERFLOW (arg1))
1711 TREE_OVERFLOW (t) = 1;
1715 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1716 to a fixed-point type. */
1719 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1721 FIXED_VALUE_TYPE value;
1725 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1726 TREE_INT_CST (arg1),
1727 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1728 TYPE_SATURATING (type));
1729 t = build_fixed (type, value);
1731 /* Propagate overflow flags. */
1732 if (overflow_p | TREE_OVERFLOW (arg1))
1733 TREE_OVERFLOW (t) = 1;
1737 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1738 to a fixed-point type. */
1741 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1743 FIXED_VALUE_TYPE value;
1747 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1748 &TREE_REAL_CST (arg1),
1749 TYPE_SATURATING (type));
1750 t = build_fixed (type, value);
1752 /* Propagate overflow flags. */
1753 if (overflow_p | TREE_OVERFLOW (arg1))
1754 TREE_OVERFLOW (t) = 1;
1758 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1759 type TYPE. If no simplification can be done return NULL_TREE. */
1762 fold_convert_const (enum tree_code code, tree type, tree arg1)
1764 if (TREE_TYPE (arg1) == type)
1767 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1768 || TREE_CODE (type) == OFFSET_TYPE)
1770 if (TREE_CODE (arg1) == INTEGER_CST)
1771 return fold_convert_const_int_from_int (type, arg1);
1772 else if (TREE_CODE (arg1) == REAL_CST)
1773 return fold_convert_const_int_from_real (code, type, arg1);
1774 else if (TREE_CODE (arg1) == FIXED_CST)
1775 return fold_convert_const_int_from_fixed (type, arg1);
1777 else if (TREE_CODE (type) == REAL_TYPE)
1779 if (TREE_CODE (arg1) == INTEGER_CST)
1780 return build_real_from_int_cst (type, arg1);
1781 else if (TREE_CODE (arg1) == REAL_CST)
1782 return fold_convert_const_real_from_real (type, arg1);
1783 else if (TREE_CODE (arg1) == FIXED_CST)
1784 return fold_convert_const_real_from_fixed (type, arg1);
1786 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1788 if (TREE_CODE (arg1) == FIXED_CST)
1789 return fold_convert_const_fixed_from_fixed (type, arg1);
1790 else if (TREE_CODE (arg1) == INTEGER_CST)
1791 return fold_convert_const_fixed_from_int (type, arg1);
1792 else if (TREE_CODE (arg1) == REAL_CST)
1793 return fold_convert_const_fixed_from_real (type, arg1);
1798 /* Construct a vector of zero elements of vector type TYPE. */
1801 build_zero_vector (tree type)
1805 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1806 return build_vector_from_val (type, t);
1809 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1812 fold_convertible_p (const_tree type, const_tree arg)
1814 tree orig = TREE_TYPE (arg);
1819 if (TREE_CODE (arg) == ERROR_MARK
1820 || TREE_CODE (type) == ERROR_MARK
1821 || TREE_CODE (orig) == ERROR_MARK)
1824 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1827 switch (TREE_CODE (type))
1829 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1830 case POINTER_TYPE: case REFERENCE_TYPE:
1832 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1833 || TREE_CODE (orig) == OFFSET_TYPE)
1835 return (TREE_CODE (orig) == VECTOR_TYPE
1836 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1839 case FIXED_POINT_TYPE:
1843 return TREE_CODE (type) == TREE_CODE (orig);
1850 /* Convert expression ARG to type TYPE. Used by the middle-end for
1851 simple conversions in preference to calling the front-end's convert. */
1854 fold_convert_loc (location_t loc, tree type, tree arg)
1856 tree orig = TREE_TYPE (arg);
1862 if (TREE_CODE (arg) == ERROR_MARK
1863 || TREE_CODE (type) == ERROR_MARK
1864 || TREE_CODE (orig) == ERROR_MARK)
1865 return error_mark_node;
1867 switch (TREE_CODE (type))
1870 case REFERENCE_TYPE:
1871 /* Handle conversions between pointers to different address spaces. */
1872 if (POINTER_TYPE_P (orig)
1873 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1874 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1875 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1878 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1880 if (TREE_CODE (arg) == INTEGER_CST)
1882 tem = fold_convert_const (NOP_EXPR, type, arg);
1883 if (tem != NULL_TREE)
1886 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1887 || TREE_CODE (orig) == OFFSET_TYPE)
1888 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1889 if (TREE_CODE (orig) == COMPLEX_TYPE)
1890 return fold_convert_loc (loc, type,
1891 fold_build1_loc (loc, REALPART_EXPR,
1892 TREE_TYPE (orig), arg));
1893 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1894 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1895 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1898 if (TREE_CODE (arg) == INTEGER_CST)
1900 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1901 if (tem != NULL_TREE)
1904 else if (TREE_CODE (arg) == REAL_CST)
1906 tem = fold_convert_const (NOP_EXPR, type, arg);
1907 if (tem != NULL_TREE)
1910 else if (TREE_CODE (arg) == FIXED_CST)
1912 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1913 if (tem != NULL_TREE)
1917 switch (TREE_CODE (orig))
1920 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1921 case POINTER_TYPE: case REFERENCE_TYPE:
1922 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1925 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1927 case FIXED_POINT_TYPE:
1928 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1931 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1932 return fold_convert_loc (loc, type, tem);
1938 case FIXED_POINT_TYPE:
1939 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1940 || TREE_CODE (arg) == REAL_CST)
1942 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1943 if (tem != NULL_TREE)
1944 goto fold_convert_exit;
1947 switch (TREE_CODE (orig))
1949 case FIXED_POINT_TYPE:
1954 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1957 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1958 return fold_convert_loc (loc, type, tem);
1965 switch (TREE_CODE (orig))
1968 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1969 case POINTER_TYPE: case REFERENCE_TYPE:
1971 case FIXED_POINT_TYPE:
1972 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1973 fold_convert_loc (loc, TREE_TYPE (type), arg),
1974 fold_convert_loc (loc, TREE_TYPE (type),
1975 integer_zero_node));
1980 if (TREE_CODE (arg) == COMPLEX_EXPR)
1982 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1983 TREE_OPERAND (arg, 0));
1984 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1985 TREE_OPERAND (arg, 1));
1986 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1989 arg = save_expr (arg);
1990 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1991 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1992 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1993 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1994 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2002 if (integer_zerop (arg))
2003 return build_zero_vector (type);
2004 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2005 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2006 || TREE_CODE (orig) == VECTOR_TYPE);
2007 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2010 tem = fold_ignored_result (arg);
2011 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2014 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2015 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2019 protected_set_expr_location_unshare (tem, loc);
2023 /* Return false if expr can be assumed not to be an lvalue, true
2027 maybe_lvalue_p (const_tree x)
2029 /* We only need to wrap lvalue tree codes. */
2030 switch (TREE_CODE (x))
2043 case ARRAY_RANGE_REF:
2049 case PREINCREMENT_EXPR:
2050 case PREDECREMENT_EXPR:
2052 case TRY_CATCH_EXPR:
2053 case WITH_CLEANUP_EXPR:
2062 /* Assume the worst for front-end tree codes. */
2063 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2071 /* Return an expr equal to X but certainly not valid as an lvalue. */
2074 non_lvalue_loc (location_t loc, tree x)
2076 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2081 if (! maybe_lvalue_p (x))
2083 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2086 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2087 Zero means allow extended lvalues. */
2089 int pedantic_lvalues;
2091 /* When pedantic, return an expr equal to X but certainly not valid as a
2092 pedantic lvalue. Otherwise, return X. */
2095 pedantic_non_lvalue_loc (location_t loc, tree x)
2097 if (pedantic_lvalues)
2098 return non_lvalue_loc (loc, x);
2100 return protected_set_expr_location_unshare (x, loc);
2103 /* Given a tree comparison code, return the code that is the logical inverse
2104 of the given code. It is not safe to do this for floating-point
2105 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2106 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2109 invert_tree_comparison (enum tree_code code, bool honor_nans)
2111 if (honor_nans && flag_trapping_math)
2121 return honor_nans ? UNLE_EXPR : LE_EXPR;
2123 return honor_nans ? UNLT_EXPR : LT_EXPR;
2125 return honor_nans ? UNGE_EXPR : GE_EXPR;
2127 return honor_nans ? UNGT_EXPR : GT_EXPR;
2141 return UNORDERED_EXPR;
2142 case UNORDERED_EXPR:
2143 return ORDERED_EXPR;
2149 /* Similar, but return the comparison that results if the operands are
2150 swapped. This is safe for floating-point. */
2153 swap_tree_comparison (enum tree_code code)
2160 case UNORDERED_EXPR:
2186 /* Convert a comparison tree code from an enum tree_code representation
2187 into a compcode bit-based encoding. This function is the inverse of
2188 compcode_to_comparison. */
2190 static enum comparison_code
2191 comparison_to_compcode (enum tree_code code)
2208 return COMPCODE_ORD;
2209 case UNORDERED_EXPR:
2210 return COMPCODE_UNORD;
2212 return COMPCODE_UNLT;
2214 return COMPCODE_UNEQ;
2216 return COMPCODE_UNLE;
2218 return COMPCODE_UNGT;
2220 return COMPCODE_LTGT;
2222 return COMPCODE_UNGE;
2228 /* Convert a compcode bit-based encoding of a comparison operator back
2229 to GCC's enum tree_code representation. This function is the
2230 inverse of comparison_to_compcode. */
2232 static enum tree_code
2233 compcode_to_comparison (enum comparison_code code)
2250 return ORDERED_EXPR;
2251 case COMPCODE_UNORD:
2252 return UNORDERED_EXPR;
2270 /* Return a tree for the comparison which is the combination of
2271 doing the AND or OR (depending on CODE) of the two operations LCODE
2272 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2273 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2274 if this makes the transformation invalid. */
2277 combine_comparisons (location_t loc,
2278 enum tree_code code, enum tree_code lcode,
2279 enum tree_code rcode, tree truth_type,
2280 tree ll_arg, tree lr_arg)
2282 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2283 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2284 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2289 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2290 compcode = lcompcode & rcompcode;
2293 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2294 compcode = lcompcode | rcompcode;
2303 /* Eliminate unordered comparisons, as well as LTGT and ORD
2304 which are not used unless the mode has NaNs. */
2305 compcode &= ~COMPCODE_UNORD;
2306 if (compcode == COMPCODE_LTGT)
2307 compcode = COMPCODE_NE;
2308 else if (compcode == COMPCODE_ORD)
2309 compcode = COMPCODE_TRUE;
2311 else if (flag_trapping_math)
2313 /* Check that the original operation and the optimized ones will trap
2314 under the same condition. */
2315 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2316 && (lcompcode != COMPCODE_EQ)
2317 && (lcompcode != COMPCODE_ORD);
2318 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2319 && (rcompcode != COMPCODE_EQ)
2320 && (rcompcode != COMPCODE_ORD);
2321 bool trap = (compcode & COMPCODE_UNORD) == 0
2322 && (compcode != COMPCODE_EQ)
2323 && (compcode != COMPCODE_ORD);
2325 /* In a short-circuited boolean expression the LHS might be
2326 such that the RHS, if evaluated, will never trap. For
2327 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2328 if neither x nor y is NaN. (This is a mixed blessing: for
2329 example, the expression above will never trap, hence
2330 optimizing it to x < y would be invalid). */
2331 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2332 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2335 /* If the comparison was short-circuited, and only the RHS
2336 trapped, we may now generate a spurious trap. */
2338 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2341 /* If we changed the conditions that cause a trap, we lose. */
2342 if ((ltrap || rtrap) != trap)
2346 if (compcode == COMPCODE_TRUE)
2347 return constant_boolean_node (true, truth_type);
2348 else if (compcode == COMPCODE_FALSE)
2349 return constant_boolean_node (false, truth_type);
2352 enum tree_code tcode;
2354 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2355 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2359 /* Return nonzero if two operands (typically of the same tree node)
2360 are necessarily equal. If either argument has side-effects this
2361 function returns zero. FLAGS modifies behavior as follows:
2363 If OEP_ONLY_CONST is set, only return nonzero for constants.
2364 This function tests whether the operands are indistinguishable;
2365 it does not test whether they are equal using C's == operation.
2366 The distinction is important for IEEE floating point, because
2367 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2368 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2370 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2371 even though it may hold multiple values during a function.
2372 This is because a GCC tree node guarantees that nothing else is
2373 executed between the evaluation of its "operands" (which may often
2374 be evaluated in arbitrary order). Hence if the operands themselves
2375 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2376 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2377 unset means assuming isochronic (or instantaneous) tree equivalence.
2378 Unless comparing arbitrary expression trees, such as from different
2379 statements, this flag can usually be left unset.
2381 If OEP_PURE_SAME is set, then pure functions with identical arguments
2382 are considered the same. It is used when the caller has other ways
2383 to ensure that global memory is unchanged in between. */
2386 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2388 /* If either is ERROR_MARK, they aren't equal. */
2389 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2390 || TREE_TYPE (arg0) == error_mark_node
2391 || TREE_TYPE (arg1) == error_mark_node)
2394 /* Similar, if either does not have a type (like a released SSA name),
2395 they aren't equal. */
2396 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2399 /* Check equality of integer constants before bailing out due to
2400 precision differences. */
2401 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2402 return tree_int_cst_equal (arg0, arg1);
2404 /* If both types don't have the same signedness, then we can't consider
2405 them equal. We must check this before the STRIP_NOPS calls
2406 because they may change the signedness of the arguments. As pointers
2407 strictly don't have a signedness, require either two pointers or
2408 two non-pointers as well. */
2409 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2410 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2413 /* We cannot consider pointers to different address space equal. */
2414 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2415 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2416 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2419 /* If both types don't have the same precision, then it is not safe
2421 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2427 /* In case both args are comparisons but with different comparison
2428 code, try to swap the comparison operands of one arg to produce
2429 a match and compare that variant. */
2430 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2431 && COMPARISON_CLASS_P (arg0)
2432 && COMPARISON_CLASS_P (arg1))
2434 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2436 if (TREE_CODE (arg0) == swap_code)
2437 return operand_equal_p (TREE_OPERAND (arg0, 0),
2438 TREE_OPERAND (arg1, 1), flags)
2439 && operand_equal_p (TREE_OPERAND (arg0, 1),
2440 TREE_OPERAND (arg1, 0), flags);
2443 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2444 /* This is needed for conversions and for COMPONENT_REF.
2445 Might as well play it safe and always test this. */
2446 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2447 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2448 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2451 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2452 We don't care about side effects in that case because the SAVE_EXPR
2453 takes care of that for us. In all other cases, two expressions are
2454 equal if they have no side effects. If we have two identical
2455 expressions with side effects that should be treated the same due
2456 to the only side effects being identical SAVE_EXPR's, that will
2457 be detected in the recursive calls below.
2458 If we are taking an invariant address of two identical objects
2459 they are necessarily equal as well. */
2460 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2461 && (TREE_CODE (arg0) == SAVE_EXPR
2462 || (flags & OEP_CONSTANT_ADDRESS_OF)
2463 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2466 /* Next handle constant cases, those for which we can return 1 even
2467 if ONLY_CONST is set. */
2468 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2469 switch (TREE_CODE (arg0))
2472 return tree_int_cst_equal (arg0, arg1);
2475 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2476 TREE_FIXED_CST (arg1));
2479 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2480 TREE_REAL_CST (arg1)))
2484 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2486 /* If we do not distinguish between signed and unsigned zero,
2487 consider them equal. */
2488 if (real_zerop (arg0) && real_zerop (arg1))
2497 v1 = TREE_VECTOR_CST_ELTS (arg0);
2498 v2 = TREE_VECTOR_CST_ELTS (arg1);
2501 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2504 v1 = TREE_CHAIN (v1);
2505 v2 = TREE_CHAIN (v2);
2512 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2514 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2518 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2519 && ! memcmp (TREE_STRING_POINTER (arg0),
2520 TREE_STRING_POINTER (arg1),
2521 TREE_STRING_LENGTH (arg0)));
2524 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2525 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2526 ? OEP_CONSTANT_ADDRESS_OF : 0);
2531 if (flags & OEP_ONLY_CONST)
2534 /* Define macros to test an operand from arg0 and arg1 for equality and a
2535 variant that allows null and views null as being different from any
2536 non-null value. In the latter case, if either is null, the both
2537 must be; otherwise, do the normal comparison. */
2538 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2539 TREE_OPERAND (arg1, N), flags)
2541 #define OP_SAME_WITH_NULL(N) \
2542 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2543 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2545 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2548 /* Two conversions are equal only if signedness and modes match. */
2549 switch (TREE_CODE (arg0))
2552 case FIX_TRUNC_EXPR:
2553 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2554 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2564 case tcc_comparison:
2566 if (OP_SAME (0) && OP_SAME (1))
2569 /* For commutative ops, allow the other order. */
2570 return (commutative_tree_code (TREE_CODE (arg0))
2571 && operand_equal_p (TREE_OPERAND (arg0, 0),
2572 TREE_OPERAND (arg1, 1), flags)
2573 && operand_equal_p (TREE_OPERAND (arg0, 1),
2574 TREE_OPERAND (arg1, 0), flags));
2577 /* If either of the pointer (or reference) expressions we are
2578 dereferencing contain a side effect, these cannot be equal. */
2579 if (TREE_SIDE_EFFECTS (arg0)
2580 || TREE_SIDE_EFFECTS (arg1))
2583 switch (TREE_CODE (arg0))
2591 /* Require equal access sizes, and similar pointer types.
2592 We can have incomplete types for array references of
2593 variable-sized arrays from the Fortran frontent
2595 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2596 || (TYPE_SIZE (TREE_TYPE (arg0))
2597 && TYPE_SIZE (TREE_TYPE (arg1))
2598 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2599 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2600 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2601 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2602 && OP_SAME (0) && OP_SAME (1));
2605 case ARRAY_RANGE_REF:
2606 /* Operands 2 and 3 may be null.
2607 Compare the array index by value if it is constant first as we
2608 may have different types but same value here. */
2610 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2611 TREE_OPERAND (arg1, 1))
2613 && OP_SAME_WITH_NULL (2)
2614 && OP_SAME_WITH_NULL (3));
2617 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2618 may be NULL when we're called to compare MEM_EXPRs. */
2619 return OP_SAME_WITH_NULL (0)
2621 && OP_SAME_WITH_NULL (2);
2624 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2630 case tcc_expression:
2631 switch (TREE_CODE (arg0))
2634 case TRUTH_NOT_EXPR:
2637 case TRUTH_ANDIF_EXPR:
2638 case TRUTH_ORIF_EXPR:
2639 return OP_SAME (0) && OP_SAME (1);
2642 case WIDEN_MULT_PLUS_EXPR:
2643 case WIDEN_MULT_MINUS_EXPR:
2646 /* The multiplcation operands are commutative. */
2649 case TRUTH_AND_EXPR:
2651 case TRUTH_XOR_EXPR:
2652 if (OP_SAME (0) && OP_SAME (1))
2655 /* Otherwise take into account this is a commutative operation. */
2656 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2657 TREE_OPERAND (arg1, 1), flags)
2658 && operand_equal_p (TREE_OPERAND (arg0, 1),
2659 TREE_OPERAND (arg1, 0), flags));
2664 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2671 switch (TREE_CODE (arg0))
2674 /* If the CALL_EXPRs call different functions, then they
2675 clearly can not be equal. */
2676 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2681 unsigned int cef = call_expr_flags (arg0);
2682 if (flags & OEP_PURE_SAME)
2683 cef &= ECF_CONST | ECF_PURE;
2690 /* Now see if all the arguments are the same. */
2692 const_call_expr_arg_iterator iter0, iter1;
2694 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2695 a1 = first_const_call_expr_arg (arg1, &iter1);
2697 a0 = next_const_call_expr_arg (&iter0),
2698 a1 = next_const_call_expr_arg (&iter1))
2699 if (! operand_equal_p (a0, a1, flags))
2702 /* If we get here and both argument lists are exhausted
2703 then the CALL_EXPRs are equal. */
2704 return ! (a0 || a1);
2710 case tcc_declaration:
2711 /* Consider __builtin_sqrt equal to sqrt. */
2712 return (TREE_CODE (arg0) == FUNCTION_DECL
2713 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2714 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2715 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2722 #undef OP_SAME_WITH_NULL
2725 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2726 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2728 When in doubt, return 0. */
2731 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2733 int unsignedp1, unsignedpo;
2734 tree primarg0, primarg1, primother;
2735 unsigned int correct_width;
2737 if (operand_equal_p (arg0, arg1, 0))
2740 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2741 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2744 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2745 and see if the inner values are the same. This removes any
2746 signedness comparison, which doesn't matter here. */
2747 primarg0 = arg0, primarg1 = arg1;
2748 STRIP_NOPS (primarg0);
2749 STRIP_NOPS (primarg1);
2750 if (operand_equal_p (primarg0, primarg1, 0))
2753 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2754 actual comparison operand, ARG0.
2756 First throw away any conversions to wider types
2757 already present in the operands. */
2759 primarg1 = get_narrower (arg1, &unsignedp1);
2760 primother = get_narrower (other, &unsignedpo);
2762 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2763 if (unsignedp1 == unsignedpo
2764 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2765 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2767 tree type = TREE_TYPE (arg0);
2769 /* Make sure shorter operand is extended the right way
2770 to match the longer operand. */
2771 primarg1 = fold_convert (signed_or_unsigned_type_for
2772 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2774 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2781 /* See if ARG is an expression that is either a comparison or is performing
2782 arithmetic on comparisons. The comparisons must only be comparing
2783 two different values, which will be stored in *CVAL1 and *CVAL2; if
2784 they are nonzero it means that some operands have already been found.
2785 No variables may be used anywhere else in the expression except in the
2786 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2787 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2789 If this is true, return 1. Otherwise, return zero. */
2792 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2794 enum tree_code code = TREE_CODE (arg);
2795 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2797 /* We can handle some of the tcc_expression cases here. */
2798 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2800 else if (tclass == tcc_expression
2801 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2802 || code == COMPOUND_EXPR))
2803 tclass = tcc_binary;
2805 else if (tclass == tcc_expression && code == SAVE_EXPR
2806 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2808 /* If we've already found a CVAL1 or CVAL2, this expression is
2809 two complex to handle. */
2810 if (*cval1 || *cval2)
2820 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2823 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2824 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2825 cval1, cval2, save_p));
2830 case tcc_expression:
2831 if (code == COND_EXPR)
2832 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2833 cval1, cval2, save_p)
2834 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2835 cval1, cval2, save_p)
2836 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2837 cval1, cval2, save_p));
2840 case tcc_comparison:
2841 /* First see if we can handle the first operand, then the second. For
2842 the second operand, we know *CVAL1 can't be zero. It must be that
2843 one side of the comparison is each of the values; test for the
2844 case where this isn't true by failing if the two operands
2847 if (operand_equal_p (TREE_OPERAND (arg, 0),
2848 TREE_OPERAND (arg, 1), 0))
2852 *cval1 = TREE_OPERAND (arg, 0);
2853 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2855 else if (*cval2 == 0)
2856 *cval2 = TREE_OPERAND (arg, 0);
2857 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2862 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2864 else if (*cval2 == 0)
2865 *cval2 = TREE_OPERAND (arg, 1);
2866 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2878 /* ARG is a tree that is known to contain just arithmetic operations and
2879 comparisons. Evaluate the operations in the tree substituting NEW0 for
2880 any occurrence of OLD0 as an operand of a comparison and likewise for
2884 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2885 tree old1, tree new1)
2887 tree type = TREE_TYPE (arg);
2888 enum tree_code code = TREE_CODE (arg);
2889 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2891 /* We can handle some of the tcc_expression cases here. */
2892 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2894 else if (tclass == tcc_expression
2895 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2896 tclass = tcc_binary;
2901 return fold_build1_loc (loc, code, type,
2902 eval_subst (loc, TREE_OPERAND (arg, 0),
2903 old0, new0, old1, new1));
2906 return fold_build2_loc (loc, code, type,
2907 eval_subst (loc, TREE_OPERAND (arg, 0),
2908 old0, new0, old1, new1),
2909 eval_subst (loc, TREE_OPERAND (arg, 1),
2910 old0, new0, old1, new1));
2912 case tcc_expression:
2916 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2920 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2924 return fold_build3_loc (loc, code, type,
2925 eval_subst (loc, TREE_OPERAND (arg, 0),
2926 old0, new0, old1, new1),
2927 eval_subst (loc, TREE_OPERAND (arg, 1),
2928 old0, new0, old1, new1),
2929 eval_subst (loc, TREE_OPERAND (arg, 2),
2930 old0, new0, old1, new1));
2934 /* Fall through - ??? */
2936 case tcc_comparison:
2938 tree arg0 = TREE_OPERAND (arg, 0);
2939 tree arg1 = TREE_OPERAND (arg, 1);
2941 /* We need to check both for exact equality and tree equality. The
2942 former will be true if the operand has a side-effect. In that
2943 case, we know the operand occurred exactly once. */
2945 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2947 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2950 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2952 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2955 return fold_build2_loc (loc, code, type, arg0, arg1);
2963 /* Return a tree for the case when the result of an expression is RESULT
2964 converted to TYPE and OMITTED was previously an operand of the expression
2965 but is now not needed (e.g., we folded OMITTED * 0).
2967 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2968 the conversion of RESULT to TYPE. */
2971 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2973 tree t = fold_convert_loc (loc, type, result);
2975 /* If the resulting operand is an empty statement, just return the omitted
2976 statement casted to void. */
2977 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2978 return build1_loc (loc, NOP_EXPR, void_type_node,
2979 fold_ignored_result (omitted));
2981 if (TREE_SIDE_EFFECTS (omitted))
2982 return build2_loc (loc, COMPOUND_EXPR, type,
2983 fold_ignored_result (omitted), t);
2985 return non_lvalue_loc (loc, t);
2988 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2991 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2994 tree t = fold_convert_loc (loc, type, result);
2996 /* If the resulting operand is an empty statement, just return the omitted
2997 statement casted to void. */
2998 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2999 return build1_loc (loc, NOP_EXPR, void_type_node,
3000 fold_ignored_result (omitted));
3002 if (TREE_SIDE_EFFECTS (omitted))
3003 return build2_loc (loc, COMPOUND_EXPR, type,
3004 fold_ignored_result (omitted), t);
3006 return pedantic_non_lvalue_loc (loc, t);
3009 /* Return a tree for the case when the result of an expression is RESULT
3010 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3011 of the expression but are now not needed.
3013 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3014 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3015 evaluated before OMITTED2. Otherwise, if neither has side effects,
3016 just do the conversion of RESULT to TYPE. */
3019 omit_two_operands_loc (location_t loc, tree type, tree result,
3020 tree omitted1, tree omitted2)
3022 tree t = fold_convert_loc (loc, type, result);
3024 if (TREE_SIDE_EFFECTS (omitted2))
3025 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3026 if (TREE_SIDE_EFFECTS (omitted1))
3027 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3029 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3033 /* Return a simplified tree node for the truth-negation of ARG. This
3034 never alters ARG itself. We assume that ARG is an operation that
3035 returns a truth value (0 or 1).
3037 FIXME: one would think we would fold the result, but it causes
3038 problems with the dominator optimizer. */
3041 fold_truth_not_expr (location_t loc, tree arg)
3043 tree type = TREE_TYPE (arg);
3044 enum tree_code code = TREE_CODE (arg);
3045 location_t loc1, loc2;
3047 /* If this is a comparison, we can simply invert it, except for
3048 floating-point non-equality comparisons, in which case we just
3049 enclose a TRUTH_NOT_EXPR around what we have. */
3051 if (TREE_CODE_CLASS (code) == tcc_comparison)
3053 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3054 if (FLOAT_TYPE_P (op_type)
3055 && flag_trapping_math
3056 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3057 && code != NE_EXPR && code != EQ_EXPR)
3060 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3061 if (code == ERROR_MARK)
3064 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3065 TREE_OPERAND (arg, 1));
3071 return constant_boolean_node (integer_zerop (arg), type);
3073 case TRUTH_AND_EXPR:
3074 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3075 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3076 return build2_loc (loc, TRUTH_OR_EXPR, type,
3077 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3078 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3081 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3082 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3083 return build2_loc (loc, TRUTH_AND_EXPR, type,
3084 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3085 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3087 case TRUTH_XOR_EXPR:
3088 /* Here we can invert either operand. We invert the first operand
3089 unless the second operand is a TRUTH_NOT_EXPR in which case our
3090 result is the XOR of the first operand with the inside of the
3091 negation of the second operand. */
3093 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3094 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3095 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3097 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3098 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3099 TREE_OPERAND (arg, 1));
3101 case TRUTH_ANDIF_EXPR:
3102 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3103 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3104 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3105 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3106 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3108 case TRUTH_ORIF_EXPR:
3109 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3110 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3111 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3112 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3113 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3115 case TRUTH_NOT_EXPR:
3116 return TREE_OPERAND (arg, 0);
3120 tree arg1 = TREE_OPERAND (arg, 1);
3121 tree arg2 = TREE_OPERAND (arg, 2);
3123 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3124 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3126 /* A COND_EXPR may have a throw as one operand, which
3127 then has void type. Just leave void operands
3129 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3130 VOID_TYPE_P (TREE_TYPE (arg1))
3131 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3132 VOID_TYPE_P (TREE_TYPE (arg2))
3133 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3137 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3138 return build2_loc (loc, COMPOUND_EXPR, type,
3139 TREE_OPERAND (arg, 0),
3140 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3142 case NON_LVALUE_EXPR:
3143 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3144 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3147 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3148 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3150 /* ... fall through ... */
3153 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3154 return build1_loc (loc, TREE_CODE (arg), type,
3155 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3158 if (!integer_onep (TREE_OPERAND (arg, 1)))
3160 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3163 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3165 case CLEANUP_POINT_EXPR:
3166 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3167 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3168 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3175 /* Return a simplified tree node for the truth-negation of ARG. This
3176 never alters ARG itself. We assume that ARG is an operation that
3177 returns a truth value (0 or 1).
3179 FIXME: one would think we would fold the result, but it causes
3180 problems with the dominator optimizer. */
3183 invert_truthvalue_loc (location_t loc, tree arg)
3187 if (TREE_CODE (arg) == ERROR_MARK)
3190 tem = fold_truth_not_expr (loc, arg);
3192 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3197 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3198 operands are another bit-wise operation with a common input. If so,
3199 distribute the bit operations to save an operation and possibly two if
3200 constants are involved. For example, convert
3201 (A | B) & (A | C) into A | (B & C)
3202 Further simplification will occur if B and C are constants.
3204 If this optimization cannot be done, 0 will be returned. */
3207 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3208 tree arg0, tree arg1)
3213 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3214 || TREE_CODE (arg0) == code
3215 || (TREE_CODE (arg0) != BIT_AND_EXPR
3216 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3219 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3221 common = TREE_OPERAND (arg0, 0);
3222 left = TREE_OPERAND (arg0, 1);
3223 right = TREE_OPERAND (arg1, 1);
3225 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3227 common = TREE_OPERAND (arg0, 0);
3228 left = TREE_OPERAND (arg0, 1);
3229 right = TREE_OPERAND (arg1, 0);
3231 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3233 common = TREE_OPERAND (arg0, 1);
3234 left = TREE_OPERAND (arg0, 0);
3235 right = TREE_OPERAND (arg1, 1);
3237 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3239 common = TREE_OPERAND (arg0, 1);
3240 left = TREE_OPERAND (arg0, 0);
3241 right = TREE_OPERAND (arg1, 0);
3246 common = fold_convert_loc (loc, type, common);
3247 left = fold_convert_loc (loc, type, left);
3248 right = fold_convert_loc (loc, type, right);
3249 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3250 fold_build2_loc (loc, code, type, left, right));
3253 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3254 with code CODE. This optimization is unsafe. */
3256 distribute_real_division (location_t loc, enum tree_code code, tree type,
3257 tree arg0, tree arg1)
3259 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3260 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3262 /* (A / C) +- (B / C) -> (A +- B) / C. */
3264 && operand_equal_p (TREE_OPERAND (arg0, 1),
3265 TREE_OPERAND (arg1, 1), 0))
3266 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3267 fold_build2_loc (loc, code, type,
3268 TREE_OPERAND (arg0, 0),
3269 TREE_OPERAND (arg1, 0)),
3270 TREE_OPERAND (arg0, 1));
3272 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3273 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3274 TREE_OPERAND (arg1, 0), 0)
3275 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3276 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3278 REAL_VALUE_TYPE r0, r1;
3279 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3280 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3282 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3284 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3285 real_arithmetic (&r0, code, &r0, &r1);
3286 return fold_build2_loc (loc, MULT_EXPR, type,
3287 TREE_OPERAND (arg0, 0),
3288 build_real (type, r0));
3294 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3295 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3298 make_bit_field_ref (location_t loc, tree inner, tree type,
3299 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3301 tree result, bftype;
3305 tree size = TYPE_SIZE (TREE_TYPE (inner));
3306 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3307 || POINTER_TYPE_P (TREE_TYPE (inner)))
3308 && host_integerp (size, 0)
3309 && tree_low_cst (size, 0) == bitsize)
3310 return fold_convert_loc (loc, type, inner);
3314 if (TYPE_PRECISION (bftype) != bitsize
3315 || TYPE_UNSIGNED (bftype) == !unsignedp)
3316 bftype = build_nonstandard_integer_type (bitsize, 0);
3318 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3319 size_int (bitsize), bitsize_int (bitpos));
3322 result = fold_convert_loc (loc, type, result);
3327 /* Optimize a bit-field compare.
3329 There are two cases: First is a compare against a constant and the
3330 second is a comparison of two items where the fields are at the same
3331 bit position relative to the start of a chunk (byte, halfword, word)
3332 large enough to contain it. In these cases we can avoid the shift
3333 implicit in bitfield extractions.
3335 For constants, we emit a compare of the shifted constant with the
3336 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3337 compared. For two fields at the same position, we do the ANDs with the
3338 similar mask and compare the result of the ANDs.
3340 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3341 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3342 are the left and right operands of the comparison, respectively.
3344 If the optimization described above can be done, we return the resulting
3345 tree. Otherwise we return zero. */
3348 optimize_bit_field_compare (location_t loc, enum tree_code code,
3349 tree compare_type, tree lhs, tree rhs)
3351 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3352 tree type = TREE_TYPE (lhs);
3353 tree signed_type, unsigned_type;
3354 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3355 enum machine_mode lmode, rmode, nmode;
3356 int lunsignedp, runsignedp;
3357 int lvolatilep = 0, rvolatilep = 0;
3358 tree linner, rinner = NULL_TREE;
3362 /* Get all the information about the extractions being done. If the bit size
3363 if the same as the size of the underlying object, we aren't doing an
3364 extraction at all and so can do nothing. We also don't want to
3365 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3366 then will no longer be able to replace it. */
3367 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3368 &lunsignedp, &lvolatilep, false);
3369 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3370 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3375 /* If this is not a constant, we can only do something if bit positions,
3376 sizes, and signedness are the same. */
3377 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3378 &runsignedp, &rvolatilep, false);
3380 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3381 || lunsignedp != runsignedp || offset != 0
3382 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3386 /* See if we can find a mode to refer to this field. We should be able to,
3387 but fail if we can't. */
3389 && GET_MODE_BITSIZE (lmode) > 0
3390 && flag_strict_volatile_bitfields > 0)
3393 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3394 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3395 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3396 TYPE_ALIGN (TREE_TYPE (rinner))),
3397 word_mode, lvolatilep || rvolatilep);
3398 if (nmode == VOIDmode)
3401 /* Set signed and unsigned types of the precision of this mode for the
3403 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3404 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3406 /* Compute the bit position and size for the new reference and our offset
3407 within it. If the new reference is the same size as the original, we
3408 won't optimize anything, so return zero. */
3409 nbitsize = GET_MODE_BITSIZE (nmode);
3410 nbitpos = lbitpos & ~ (nbitsize - 1);
3412 if (nbitsize == lbitsize)
3415 if (BYTES_BIG_ENDIAN)
3416 lbitpos = nbitsize - lbitsize - lbitpos;
3418 /* Make the mask to be used against the extracted field. */
3419 mask = build_int_cst_type (unsigned_type, -1);
3420 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3421 mask = const_binop (RSHIFT_EXPR, mask,
3422 size_int (nbitsize - lbitsize - lbitpos));
3425 /* If not comparing with constant, just rework the comparison
3427 return fold_build2_loc (loc, code, compare_type,
3428 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3429 make_bit_field_ref (loc, linner,
3434 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3435 make_bit_field_ref (loc, rinner,
3441 /* Otherwise, we are handling the constant case. See if the constant is too
3442 big for the field. Warn and return a tree of for 0 (false) if so. We do
3443 this not only for its own sake, but to avoid having to test for this
3444 error case below. If we didn't, we might generate wrong code.
3446 For unsigned fields, the constant shifted right by the field length should
3447 be all zero. For signed fields, the high-order bits should agree with
3452 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3453 fold_convert_loc (loc,
3454 unsigned_type, rhs),
3455 size_int (lbitsize))))
3457 warning (0, "comparison is always %d due to width of bit-field",
3459 return constant_boolean_node (code == NE_EXPR, compare_type);
3464 tree tem = const_binop (RSHIFT_EXPR,
3465 fold_convert_loc (loc, signed_type, rhs),
3466 size_int (lbitsize - 1));
3467 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3469 warning (0, "comparison is always %d due to width of bit-field",
3471 return constant_boolean_node (code == NE_EXPR, compare_type);
3475 /* Single-bit compares should always be against zero. */
3476 if (lbitsize == 1 && ! integer_zerop (rhs))
3478 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3479 rhs = build_int_cst (type, 0);
3482 /* Make a new bitfield reference, shift the constant over the
3483 appropriate number of bits and mask it with the computed mask
3484 (in case this was a signed field). If we changed it, make a new one. */
3485 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3488 TREE_SIDE_EFFECTS (lhs) = 1;
3489 TREE_THIS_VOLATILE (lhs) = 1;
3492 rhs = const_binop (BIT_AND_EXPR,
3493 const_binop (LSHIFT_EXPR,
3494 fold_convert_loc (loc, unsigned_type, rhs),
3495 size_int (lbitpos)),
3498 lhs = build2_loc (loc, code, compare_type,
3499 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3503 /* Subroutine for fold_truth_andor_1: decode a field reference.
3505 If EXP is a comparison reference, we return the innermost reference.
3507 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3508 set to the starting bit number.
3510 If the innermost field can be completely contained in a mode-sized
3511 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3513 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3514 otherwise it is not changed.
3516 *PUNSIGNEDP is set to the signedness of the field.
3518 *PMASK is set to the mask used. This is either contained in a
3519 BIT_AND_EXPR or derived from the width of the field.
3521 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3523 Return 0 if this is not a component reference or is one that we can't
3524 do anything with. */
3527 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3528 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3529 int *punsignedp, int *pvolatilep,
3530 tree *pmask, tree *pand_mask)
3532 tree outer_type = 0;
3534 tree mask, inner, offset;
3536 unsigned int precision;
3538 /* All the optimizations using this function assume integer fields.
3539 There are problems with FP fields since the type_for_size call
3540 below can fail for, e.g., XFmode. */
3541 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3544 /* We are interested in the bare arrangement of bits, so strip everything
3545 that doesn't affect the machine mode. However, record the type of the
3546 outermost expression if it may matter below. */
3547 if (CONVERT_EXPR_P (exp)
3548 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3549 outer_type = TREE_TYPE (exp);
3552 if (TREE_CODE (exp) == BIT_AND_EXPR)
3554 and_mask = TREE_OPERAND (exp, 1);
3555 exp = TREE_OPERAND (exp, 0);
3556 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3557 if (TREE_CODE (and_mask) != INTEGER_CST)
3561 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3562 punsignedp, pvolatilep, false);
3563 if ((inner == exp && and_mask == 0)
3564 || *pbitsize < 0 || offset != 0
3565 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3568 /* If the number of bits in the reference is the same as the bitsize of
3569 the outer type, then the outer type gives the signedness. Otherwise
3570 (in case of a small bitfield) the signedness is unchanged. */
3571 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3572 *punsignedp = TYPE_UNSIGNED (outer_type);
3574 /* Compute the mask to access the bitfield. */
3575 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3576 precision = TYPE_PRECISION (unsigned_type);
3578 mask = build_int_cst_type (unsigned_type, -1);
3580 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3581 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3583 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3585 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3586 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3589 *pand_mask = and_mask;
3593 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3597 all_ones_mask_p (const_tree mask, int size)
3599 tree type = TREE_TYPE (mask);
3600 unsigned int precision = TYPE_PRECISION (type);
3603 tmask = build_int_cst_type (signed_type_for (type), -1);
3606 tree_int_cst_equal (mask,
3607 const_binop (RSHIFT_EXPR,
3608 const_binop (LSHIFT_EXPR, tmask,
3609 size_int (precision - size)),
3610 size_int (precision - size)));
3613 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3614 represents the sign bit of EXP's type. If EXP represents a sign
3615 or zero extension, also test VAL against the unextended type.
3616 The return value is the (sub)expression whose sign bit is VAL,
3617 or NULL_TREE otherwise. */
3620 sign_bit_p (tree exp, const_tree val)
3622 unsigned HOST_WIDE_INT mask_lo, lo;
3623 HOST_WIDE_INT mask_hi, hi;
3627 /* Tree EXP must have an integral type. */
3628 t = TREE_TYPE (exp);
3629 if (! INTEGRAL_TYPE_P (t))
3632 /* Tree VAL must be an integer constant. */
3633 if (TREE_CODE (val) != INTEGER_CST
3634 || TREE_OVERFLOW (val))
3637 width = TYPE_PRECISION (t);
3638 if (width > HOST_BITS_PER_WIDE_INT)
3640 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3643 mask_hi = ((unsigned HOST_WIDE_INT) -1
3644 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3650 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3653 mask_lo = ((unsigned HOST_WIDE_INT) -1
3654 >> (HOST_BITS_PER_WIDE_INT - width));
3657 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3658 treat VAL as if it were unsigned. */
3659 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3660 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3663 /* Handle extension from a narrower type. */
3664 if (TREE_CODE (exp) == NOP_EXPR
3665 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3666 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3671 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3672 to be evaluated unconditionally. */
3675 simple_operand_p (const_tree exp)
3677 /* Strip any conversions that don't change the machine mode. */
3680 return (CONSTANT_CLASS_P (exp)
3681 || TREE_CODE (exp) == SSA_NAME
3683 && ! TREE_ADDRESSABLE (exp)
3684 && ! TREE_THIS_VOLATILE (exp)
3685 && ! DECL_NONLOCAL (exp)
3686 /* Don't regard global variables as simple. They may be
3687 allocated in ways unknown to the compiler (shared memory,
3688 #pragma weak, etc). */
3689 && ! TREE_PUBLIC (exp)
3690 && ! DECL_EXTERNAL (exp)
3691 /* Loading a static variable is unduly expensive, but global
3692 registers aren't expensive. */
3693 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3696 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3697 to be evaluated unconditionally.
3698 I addition to simple_operand_p, we assume that comparisons and logic-not
3699 operations are simple, if their operands are simple, too. */
3702 simple_operand_p_2 (tree exp)
3704 enum tree_code code;
3706 /* Strip any conversions that don't change the machine mode. */
3709 code = TREE_CODE (exp);
3711 if (TREE_SIDE_EFFECTS (exp)
3712 || tree_could_trap_p (exp))
3715 if (TREE_CODE_CLASS (code) == tcc_comparison)
3716 return (simple_operand_p (TREE_OPERAND (exp, 0))
3717 && simple_operand_p (TREE_OPERAND (exp, 1)));
3719 if (code == TRUTH_NOT_EXPR)
3720 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3722 return simple_operand_p (exp);
3726 /* The following functions are subroutines to fold_range_test and allow it to
3727 try to change a logical combination of comparisons into a range test.
3730 X == 2 || X == 3 || X == 4 || X == 5
3734 (unsigned) (X - 2) <= 3
3736 We describe each set of comparisons as being either inside or outside
3737 a range, using a variable named like IN_P, and then describe the
3738 range with a lower and upper bound. If one of the bounds is omitted,
3739 it represents either the highest or lowest value of the type.
3741 In the comments below, we represent a range by two numbers in brackets
3742 preceded by a "+" to designate being inside that range, or a "-" to
3743 designate being outside that range, so the condition can be inverted by
3744 flipping the prefix. An omitted bound is represented by a "-". For
3745 example, "- [-, 10]" means being outside the range starting at the lowest
3746 possible value and ending at 10, in other words, being greater than 10.
3747 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3750 We set up things so that the missing bounds are handled in a consistent
3751 manner so neither a missing bound nor "true" and "false" need to be
3752 handled using a special case. */
3754 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3755 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3756 and UPPER1_P are nonzero if the respective argument is an upper bound
3757 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3758 must be specified for a comparison. ARG1 will be converted to ARG0's
3759 type if both are specified. */
3762 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3763 tree arg1, int upper1_p)
3769 /* If neither arg represents infinity, do the normal operation.
3770 Else, if not a comparison, return infinity. Else handle the special
3771 comparison rules. Note that most of the cases below won't occur, but
3772 are handled for consistency. */
3774 if (arg0 != 0 && arg1 != 0)
3776 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3777 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3779 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3782 if (TREE_CODE_CLASS (code) != tcc_comparison)
3785 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3786 for neither. In real maths, we cannot assume open ended ranges are
3787 the same. But, this is computer arithmetic, where numbers are finite.
3788 We can therefore make the transformation of any unbounded range with
3789 the value Z, Z being greater than any representable number. This permits
3790 us to treat unbounded ranges as equal. */
3791 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3792 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3796 result = sgn0 == sgn1;
3799 result = sgn0 != sgn1;
3802 result = sgn0 < sgn1;
3805 result = sgn0 <= sgn1;
3808 result = sgn0 > sgn1;
3811 result = sgn0 >= sgn1;
3817 return constant_boolean_node (result, type);
3820 /* Helper routine for make_range. Perform one step for it, return
3821 new expression if the loop should continue or NULL_TREE if it should
3825 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3826 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3827 bool *strict_overflow_p)
3829 tree arg0_type = TREE_TYPE (arg0);
3830 tree n_low, n_high, low = *p_low, high = *p_high;
3831 int in_p = *p_in_p, n_in_p;
3835 case TRUTH_NOT_EXPR:
3839 case EQ_EXPR: case NE_EXPR:
3840 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3841 /* We can only do something if the range is testing for zero
3842 and if the second operand is an integer constant. Note that
3843 saying something is "in" the range we make is done by
3844 complementing IN_P since it will set in the initial case of
3845 being not equal to zero; "out" is leaving it alone. */
3846 if (low == NULL_TREE || high == NULL_TREE
3847 || ! integer_zerop (low) || ! integer_zerop (high)
3848 || TREE_CODE (arg1) != INTEGER_CST)
3853 case NE_EXPR: /* - [c, c] */
3856 case EQ_EXPR: /* + [c, c] */
3857 in_p = ! in_p, low = high = arg1;
3859 case GT_EXPR: /* - [-, c] */
3860 low = 0, high = arg1;
3862 case GE_EXPR: /* + [c, -] */
3863 in_p = ! in_p, low = arg1, high = 0;
3865 case LT_EXPR: /* - [c, -] */
3866 low = arg1, high = 0;
3868 case LE_EXPR: /* + [-, c] */
3869 in_p = ! in_p, low = 0, high = arg1;
3875 /* If this is an unsigned comparison, we also know that EXP is
3876 greater than or equal to zero. We base the range tests we make
3877 on that fact, so we record it here so we can parse existing
3878 range tests. We test arg0_type since often the return type
3879 of, e.g. EQ_EXPR, is boolean. */
3880 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3882 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3884 build_int_cst (arg0_type, 0),
3888 in_p = n_in_p, low = n_low, high = n_high;
3890 /* If the high bound is missing, but we have a nonzero low
3891 bound, reverse the range so it goes from zero to the low bound
3893 if (high == 0 && low && ! integer_zerop (low))
3896 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3897 integer_one_node, 0);
3898 low = build_int_cst (arg0_type, 0);
3908 /* (-x) IN [a,b] -> x in [-b, -a] */
3909 n_low = range_binop (MINUS_EXPR, exp_type,
3910 build_int_cst (exp_type, 0),
3912 n_high = range_binop (MINUS_EXPR, exp_type,
3913 build_int_cst (exp_type, 0),
3915 if (n_high != 0 && TREE_OVERFLOW (n_high))
3921 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3922 build_int_cst (exp_type, 1));
3926 if (TREE_CODE (arg1) != INTEGER_CST)
3929 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3930 move a constant to the other side. */
3931 if (!TYPE_UNSIGNED (arg0_type)
3932 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3935 /* If EXP is signed, any overflow in the computation is undefined,
3936 so we don't worry about it so long as our computations on
3937 the bounds don't overflow. For unsigned, overflow is defined
3938 and this is exactly the right thing. */
3939 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3940 arg0_type, low, 0, arg1, 0);
3941 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3942 arg0_type, high, 1, arg1, 0);
3943 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3944 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3947 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3948 *strict_overflow_p = true;
3951 /* Check for an unsigned range which has wrapped around the maximum
3952 value thus making n_high < n_low, and normalize it. */
3953 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3955 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3956 integer_one_node, 0);
3957 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3958 integer_one_node, 0);
3960 /* If the range is of the form +/- [ x+1, x ], we won't
3961 be able to normalize it. But then, it represents the
3962 whole range or the empty set, so make it
3964 if (tree_int_cst_equal (n_low, low)
3965 && tree_int_cst_equal (n_high, high))
3971 low = n_low, high = n_high;
3979 case NON_LVALUE_EXPR:
3980 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3983 if (! INTEGRAL_TYPE_P (arg0_type)
3984 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3985 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3988 n_low = low, n_high = high;
3991 n_low = fold_convert_loc (loc, arg0_type, n_low);
3994 n_high = fold_convert_loc (loc, arg0_type, n_high);
3996 /* If we're converting arg0 from an unsigned type, to exp,
3997 a signed type, we will be doing the comparison as unsigned.
3998 The tests above have already verified that LOW and HIGH
4001 So we have to ensure that we will handle large unsigned
4002 values the same way that the current signed bounds treat
4005 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4009 /* For fixed-point modes, we need to pass the saturating flag
4010 as the 2nd parameter. */
4011 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4013 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4014 TYPE_SATURATING (arg0_type));
4017 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4019 /* A range without an upper bound is, naturally, unbounded.
4020 Since convert would have cropped a very large value, use
4021 the max value for the destination type. */
4023 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4024 : TYPE_MAX_VALUE (arg0_type);
4026 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4027 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4028 fold_convert_loc (loc, arg0_type,
4030 build_int_cst (arg0_type, 1));
4032 /* If the low bound is specified, "and" the range with the
4033 range for which the original unsigned value will be
4037 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4038 1, fold_convert_loc (loc, arg0_type,
4043 in_p = (n_in_p == in_p);
4047 /* Otherwise, "or" the range with the range of the input
4048 that will be interpreted as negative. */
4049 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4050 1, fold_convert_loc (loc, arg0_type,
4055 in_p = (in_p != n_in_p);
4069 /* Given EXP, a logical expression, set the range it is testing into
4070 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4071 actually being tested. *PLOW and *PHIGH will be made of the same
4072 type as the returned expression. If EXP is not a comparison, we
4073 will most likely not be returning a useful value and range. Set
4074 *STRICT_OVERFLOW_P to true if the return value is only valid
4075 because signed overflow is undefined; otherwise, do not change
4076 *STRICT_OVERFLOW_P. */
4079 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4080 bool *strict_overflow_p)
4082 enum tree_code code;
4083 tree arg0, arg1 = NULL_TREE;
4084 tree exp_type, nexp;
4087 location_t loc = EXPR_LOCATION (exp);
4089 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4090 and see if we can refine the range. Some of the cases below may not
4091 happen, but it doesn't seem worth worrying about this. We "continue"
4092 the outer loop when we've changed something; otherwise we "break"
4093 the switch, which will "break" the while. */
4096 low = high = build_int_cst (TREE_TYPE (exp), 0);
4100 code = TREE_CODE (exp);
4101 exp_type = TREE_TYPE (exp);
4104 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4106 if (TREE_OPERAND_LENGTH (exp) > 0)
4107 arg0 = TREE_OPERAND (exp, 0);
4108 if (TREE_CODE_CLASS (code) == tcc_binary
4109 || TREE_CODE_CLASS (code) == tcc_comparison
4110 || (TREE_CODE_CLASS (code) == tcc_expression
4111 && TREE_OPERAND_LENGTH (exp) > 1))
4112 arg1 = TREE_OPERAND (exp, 1);
4114 if (arg0 == NULL_TREE)
4117 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4118 &high, &in_p, strict_overflow_p);
4119 if (nexp == NULL_TREE)
4124 /* If EXP is a constant, we can evaluate whether this is true or false. */
4125 if (TREE_CODE (exp) == INTEGER_CST)
4127 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4129 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4135 *pin_p = in_p, *plow = low, *phigh = high;
4139 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4140 type, TYPE, return an expression to test if EXP is in (or out of, depending
4141 on IN_P) the range. Return 0 if the test couldn't be created. */
4144 build_range_check (location_t loc, tree type, tree exp, int in_p,
4145 tree low, tree high)
4147 tree etype = TREE_TYPE (exp), value;
4149 #ifdef HAVE_canonicalize_funcptr_for_compare
4150 /* Disable this optimization for function pointer expressions
4151 on targets that require function pointer canonicalization. */
4152 if (HAVE_canonicalize_funcptr_for_compare
4153 && TREE_CODE (etype) == POINTER_TYPE
4154 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4160 value = build_range_check (loc, type, exp, 1, low, high);
4162 return invert_truthvalue_loc (loc, value);
4167 if (low == 0 && high == 0)
4168 return build_int_cst (type, 1);
4171 return fold_build2_loc (loc, LE_EXPR, type, exp,
4172 fold_convert_loc (loc, etype, high));
4175 return fold_build2_loc (loc, GE_EXPR, type, exp,
4176 fold_convert_loc (loc, etype, low));
4178 if (operand_equal_p (low, high, 0))
4179 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4180 fold_convert_loc (loc, etype, low));
4182 if (integer_zerop (low))
4184 if (! TYPE_UNSIGNED (etype))
4186 etype = unsigned_type_for (etype);
4187 high = fold_convert_loc (loc, etype, high);
4188 exp = fold_convert_loc (loc, etype, exp);
4190 return build_range_check (loc, type, exp, 1, 0, high);
4193 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4194 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4196 unsigned HOST_WIDE_INT lo;
4200 prec = TYPE_PRECISION (etype);
4201 if (prec <= HOST_BITS_PER_WIDE_INT)
4204 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4208 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4209 lo = (unsigned HOST_WIDE_INT) -1;
4212 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4214 if (TYPE_UNSIGNED (etype))
4216 tree signed_etype = signed_type_for (etype);
4217 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4219 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4221 etype = signed_etype;
4222 exp = fold_convert_loc (loc, etype, exp);
4224 return fold_build2_loc (loc, GT_EXPR, type, exp,
4225 build_int_cst (etype, 0));
4229 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4230 This requires wrap-around arithmetics for the type of the expression.
4231 First make sure that arithmetics in this type is valid, then make sure
4232 that it wraps around. */
4233 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4234 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4235 TYPE_UNSIGNED (etype));
4237 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4239 tree utype, minv, maxv;
4241 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4242 for the type in question, as we rely on this here. */
4243 utype = unsigned_type_for (etype);
4244 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4245 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4246 integer_one_node, 1);
4247 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4249 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4256 high = fold_convert_loc (loc, etype, high);
4257 low = fold_convert_loc (loc, etype, low);
4258 exp = fold_convert_loc (loc, etype, exp);
4260 value = const_binop (MINUS_EXPR, high, low);
4263 if (POINTER_TYPE_P (etype))
4265 if (value != 0 && !TREE_OVERFLOW (value))
4267 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4268 return build_range_check (loc, type,
4269 fold_build_pointer_plus_loc (loc, exp, low),
4270 1, build_int_cst (etype, 0), value);
4275 if (value != 0 && !TREE_OVERFLOW (value))
4276 return build_range_check (loc, type,
4277 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4278 1, build_int_cst (etype, 0), value);
4283 /* Return the predecessor of VAL in its type, handling the infinite case. */
4286 range_predecessor (tree val)
4288 tree type = TREE_TYPE (val);
4290 if (INTEGRAL_TYPE_P (type)
4291 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4294 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4297 /* Return the successor of VAL in its type, handling the infinite case. */
4300 range_successor (tree val)
4302 tree type = TREE_TYPE (val);
4304 if (INTEGRAL_TYPE_P (type)
4305 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4308 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4311 /* Given two ranges, see if we can merge them into one. Return 1 if we
4312 can, 0 if we can't. Set the output range into the specified parameters. */
4315 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4316 tree high0, int in1_p, tree low1, tree high1)
4324 int lowequal = ((low0 == 0 && low1 == 0)
4325 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4326 low0, 0, low1, 0)));
4327 int highequal = ((high0 == 0 && high1 == 0)
4328 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4329 high0, 1, high1, 1)));
4331 /* Make range 0 be the range that starts first, or ends last if they
4332 start at the same value. Swap them if it isn't. */
4333 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4336 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4337 high1, 1, high0, 1))))
4339 temp = in0_p, in0_p = in1_p, in1_p = temp;
4340 tem = low0, low0 = low1, low1 = tem;
4341 tem = high0, high0 = high1, high1 = tem;
4344 /* Now flag two cases, whether the ranges are disjoint or whether the
4345 second range is totally subsumed in the first. Note that the tests
4346 below are simplified by the ones above. */
4347 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4348 high0, 1, low1, 0));
4349 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4350 high1, 1, high0, 1));
4352 /* We now have four cases, depending on whether we are including or
4353 excluding the two ranges. */
4356 /* If they don't overlap, the result is false. If the second range
4357 is a subset it is the result. Otherwise, the range is from the start
4358 of the second to the end of the first. */
4360 in_p = 0, low = high = 0;
4362 in_p = 1, low = low1, high = high1;
4364 in_p = 1, low = low1, high = high0;
4367 else if (in0_p && ! in1_p)
4369 /* If they don't overlap, the result is the first range. If they are
4370 equal, the result is false. If the second range is a subset of the
4371 first, and the ranges begin at the same place, we go from just after
4372 the end of the second range to the end of the first. If the second
4373 range is not a subset of the first, or if it is a subset and both
4374 ranges end at the same place, the range starts at the start of the
4375 first range and ends just before the second range.
4376 Otherwise, we can't describe this as a single range. */
4378 in_p = 1, low = low0, high = high0;
4379 else if (lowequal && highequal)
4380 in_p = 0, low = high = 0;
4381 else if (subset && lowequal)
4383 low = range_successor (high1);
4388 /* We are in the weird situation where high0 > high1 but
4389 high1 has no successor. Punt. */
4393 else if (! subset || highequal)
4396 high = range_predecessor (low1);
4400 /* low0 < low1 but low1 has no predecessor. Punt. */
4408 else if (! in0_p && in1_p)
4410 /* If they don't overlap, the result is the second range. If the second
4411 is a subset of the first, the result is false. Otherwise,
4412 the range starts just after the first range and ends at the
4413 end of the second. */
4415 in_p = 1, low = low1, high = high1;
4416 else if (subset || highequal)
4417 in_p = 0, low = high = 0;
4420 low = range_successor (high0);
4425 /* high1 > high0 but high0 has no successor. Punt. */
4433 /* The case where we are excluding both ranges. Here the complex case
4434 is if they don't overlap. In that case, the only time we have a
4435 range is if they are adjacent. If the second is a subset of the
4436 first, the result is the first. Otherwise, the range to exclude
4437 starts at the beginning of the first range and ends at the end of the
4441 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4442 range_successor (high0),
4444 in_p = 0, low = low0, high = high1;
4447 /* Canonicalize - [min, x] into - [-, x]. */
4448 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4449 switch (TREE_CODE (TREE_TYPE (low0)))
4452 if (TYPE_PRECISION (TREE_TYPE (low0))
4453 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4457 if (tree_int_cst_equal (low0,
4458 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4462 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4463 && integer_zerop (low0))
4470 /* Canonicalize - [x, max] into - [x, -]. */
4471 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4472 switch (TREE_CODE (TREE_TYPE (high1)))
4475 if (TYPE_PRECISION (TREE_TYPE (high1))
4476 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4480 if (tree_int_cst_equal (high1,
4481 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4485 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4486 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4488 integer_one_node, 1)))
4495 /* The ranges might be also adjacent between the maximum and
4496 minimum values of the given type. For
4497 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4498 return + [x + 1, y - 1]. */
4499 if (low0 == 0 && high1 == 0)
4501 low = range_successor (high0);
4502 high = range_predecessor (low1);
4503 if (low == 0 || high == 0)
4513 in_p = 0, low = low0, high = high0;
4515 in_p = 0, low = low0, high = high1;
4518 *pin_p = in_p, *plow = low, *phigh = high;
4523 /* Subroutine of fold, looking inside expressions of the form
4524 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4525 of the COND_EXPR. This function is being used also to optimize
4526 A op B ? C : A, by reversing the comparison first.
4528 Return a folded expression whose code is not a COND_EXPR
4529 anymore, or NULL_TREE if no folding opportunity is found. */
4532 fold_cond_expr_with_comparison (location_t loc, tree type,
4533 tree arg0, tree arg1, tree arg2)
4535 enum tree_code comp_code = TREE_CODE (arg0);
4536 tree arg00 = TREE_OPERAND (arg0, 0);
4537 tree arg01 = TREE_OPERAND (arg0, 1);
4538 tree arg1_type = TREE_TYPE (arg1);
4544 /* If we have A op 0 ? A : -A, consider applying the following
4547 A == 0? A : -A same as -A
4548 A != 0? A : -A same as A
4549 A >= 0? A : -A same as abs (A)
4550 A > 0? A : -A same as abs (A)
4551 A <= 0? A : -A same as -abs (A)
4552 A < 0? A : -A same as -abs (A)
4554 None of these transformations work for modes with signed
4555 zeros. If A is +/-0, the first two transformations will
4556 change the sign of the result (from +0 to -0, or vice
4557 versa). The last four will fix the sign of the result,
4558 even though the original expressions could be positive or
4559 negative, depending on the sign of A.
4561 Note that all these transformations are correct if A is
4562 NaN, since the two alternatives (A and -A) are also NaNs. */
4563 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4564 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4565 ? real_zerop (arg01)
4566 : integer_zerop (arg01))
4567 && ((TREE_CODE (arg2) == NEGATE_EXPR
4568 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4569 /* In the case that A is of the form X-Y, '-A' (arg2) may
4570 have already been folded to Y-X, check for that. */
4571 || (TREE_CODE (arg1) == MINUS_EXPR
4572 && TREE_CODE (arg2) == MINUS_EXPR
4573 && operand_equal_p (TREE_OPERAND (arg1, 0),
4574 TREE_OPERAND (arg2, 1), 0)
4575 && operand_equal_p (TREE_OPERAND (arg1, 1),
4576 TREE_OPERAND (arg2, 0), 0))))
4581 tem = fold_convert_loc (loc, arg1_type, arg1);
4582 return pedantic_non_lvalue_loc (loc,
4583 fold_convert_loc (loc, type,
4584 negate_expr (tem)));
4587 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4590 if (flag_trapping_math)
4595 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4596 arg1 = fold_convert_loc (loc, signed_type_for
4597 (TREE_TYPE (arg1)), arg1);
4598 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4599 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4602 if (flag_trapping_math)
4606 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4607 arg1 = fold_convert_loc (loc, signed_type_for
4608 (TREE_TYPE (arg1)), arg1);
4609 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4610 return negate_expr (fold_convert_loc (loc, type, tem));
4612 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4616 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4617 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4618 both transformations are correct when A is NaN: A != 0
4619 is then true, and A == 0 is false. */
4621 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4622 && integer_zerop (arg01) && integer_zerop (arg2))
4624 if (comp_code == NE_EXPR)
4625 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4626 else if (comp_code == EQ_EXPR)
4627 return build_int_cst (type, 0);
4630 /* Try some transformations of A op B ? A : B.
4632 A == B? A : B same as B
4633 A != B? A : B same as A
4634 A >= B? A : B same as max (A, B)
4635 A > B? A : B same as max (B, A)
4636 A <= B? A : B same as min (A, B)
4637 A < B? A : B same as min (B, A)
4639 As above, these transformations don't work in the presence
4640 of signed zeros. For example, if A and B are zeros of
4641 opposite sign, the first two transformations will change
4642 the sign of the result. In the last four, the original
4643 expressions give different results for (A=+0, B=-0) and
4644 (A=-0, B=+0), but the transformed expressions do not.
4646 The first two transformations are correct if either A or B
4647 is a NaN. In the first transformation, the condition will
4648 be false, and B will indeed be chosen. In the case of the
4649 second transformation, the condition A != B will be true,
4650 and A will be chosen.
4652 The conversions to max() and min() are not correct if B is
4653 a number and A is not. The conditions in the original
4654 expressions will be false, so all four give B. The min()
4655 and max() versions would give a NaN instead. */
4656 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4657 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4658 /* Avoid these transformations if the COND_EXPR may be used
4659 as an lvalue in the C++ front-end. PR c++/19199. */
4661 || (strcmp (lang_hooks.name, "GNU C++") != 0
4662 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4663 || ! maybe_lvalue_p (arg1)
4664 || ! maybe_lvalue_p (arg2)))
4666 tree comp_op0 = arg00;
4667 tree comp_op1 = arg01;
4668 tree comp_type = TREE_TYPE (comp_op0);
4670 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4671 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4681 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4683 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4688 /* In C++ a ?: expression can be an lvalue, so put the
4689 operand which will be used if they are equal first
4690 so that we can convert this back to the
4691 corresponding COND_EXPR. */
4692 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4694 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4695 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4696 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4697 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4698 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4699 comp_op1, comp_op0);
4700 return pedantic_non_lvalue_loc (loc,
4701 fold_convert_loc (loc, type, tem));
4708 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4710 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4711 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4712 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4713 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4714 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4715 comp_op1, comp_op0);
4716 return pedantic_non_lvalue_loc (loc,
4717 fold_convert_loc (loc, type, tem));
4721 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4722 return pedantic_non_lvalue_loc (loc,
4723 fold_convert_loc (loc, type, arg2));
4726 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4727 return pedantic_non_lvalue_loc (loc,
4728 fold_convert_loc (loc, type, arg1));
4731 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4736 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4737 we might still be able to simplify this. For example,
4738 if C1 is one less or one more than C2, this might have started
4739 out as a MIN or MAX and been transformed by this function.
4740 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4742 if (INTEGRAL_TYPE_P (type)
4743 && TREE_CODE (arg01) == INTEGER_CST
4744 && TREE_CODE (arg2) == INTEGER_CST)
4748 if (TREE_CODE (arg1) == INTEGER_CST)
4750 /* We can replace A with C1 in this case. */
4751 arg1 = fold_convert_loc (loc, type, arg01);
4752 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4755 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4756 MIN_EXPR, to preserve the signedness of the comparison. */
4757 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4759 && operand_equal_p (arg01,
4760 const_binop (PLUS_EXPR, arg2,
4761 build_int_cst (type, 1)),
4764 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4765 fold_convert_loc (loc, TREE_TYPE (arg00),
4767 return pedantic_non_lvalue_loc (loc,
4768 fold_convert_loc (loc, type, tem));
4773 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4775 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4777 && operand_equal_p (arg01,
4778 const_binop (MINUS_EXPR, arg2,
4779 build_int_cst (type, 1)),
4782 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4783 fold_convert_loc (loc, TREE_TYPE (arg00),
4785 return pedantic_non_lvalue_loc (loc,
4786 fold_convert_loc (loc, type, tem));
4791 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4792 MAX_EXPR, to preserve the signedness of the comparison. */
4793 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4795 && operand_equal_p (arg01,
4796 const_binop (MINUS_EXPR, arg2,
4797 build_int_cst (type, 1)),
4800 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4801 fold_convert_loc (loc, TREE_TYPE (arg00),
4803 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4808 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4809 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4811 && operand_equal_p (arg01,
4812 const_binop (PLUS_EXPR, arg2,
4813 build_int_cst (type, 1)),
4816 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4817 fold_convert_loc (loc, TREE_TYPE (arg00),
4819 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4833 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4834 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4835 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4839 /* EXP is some logical combination of boolean tests. See if we can
4840 merge it into some range test. Return the new tree if so. */
4843 fold_range_test (location_t loc, enum tree_code code, tree type,
4846 int or_op = (code == TRUTH_ORIF_EXPR
4847 || code == TRUTH_OR_EXPR);
4848 int in0_p, in1_p, in_p;
4849 tree low0, low1, low, high0, high1, high;
4850 bool strict_overflow_p = false;
4851 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4852 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4854 const char * const warnmsg = G_("assuming signed overflow does not occur "
4855 "when simplifying range test");
4857 /* If this is an OR operation, invert both sides; we will invert
4858 again at the end. */
4860 in0_p = ! in0_p, in1_p = ! in1_p;
4862 /* If both expressions are the same, if we can merge the ranges, and we
4863 can build the range test, return it or it inverted. If one of the
4864 ranges is always true or always false, consider it to be the same
4865 expression as the other. */
4866 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4867 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4869 && 0 != (tem = (build_range_check (loc, type,
4871 : rhs != 0 ? rhs : integer_zero_node,
4874 if (strict_overflow_p)
4875 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4876 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4879 /* On machines where the branch cost is expensive, if this is a
4880 short-circuited branch and the underlying object on both sides
4881 is the same, make a non-short-circuit operation. */
4882 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4883 && lhs != 0 && rhs != 0
4884 && (code == TRUTH_ANDIF_EXPR
4885 || code == TRUTH_ORIF_EXPR)
4886 && operand_equal_p (lhs, rhs, 0))
4888 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4889 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4890 which cases we can't do this. */
4891 if (simple_operand_p (lhs))
4892 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4893 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4896 else if (!lang_hooks.decls.global_bindings_p ()
4897 && !CONTAINS_PLACEHOLDER_P (lhs))
4899 tree common = save_expr (lhs);
4901 if (0 != (lhs = build_range_check (loc, type, common,
4902 or_op ? ! in0_p : in0_p,
4904 && (0 != (rhs = build_range_check (loc, type, common,
4905 or_op ? ! in1_p : in1_p,
4908 if (strict_overflow_p)
4909 fold_overflow_warning (warnmsg,
4910 WARN_STRICT_OVERFLOW_COMPARISON);
4911 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4912 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4921 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4922 bit value. Arrange things so the extra bits will be set to zero if and
4923 only if C is signed-extended to its full width. If MASK is nonzero,
4924 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4927 unextend (tree c, int p, int unsignedp, tree mask)
4929 tree type = TREE_TYPE (c);
4930 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4933 if (p == modesize || unsignedp)
4936 /* We work by getting just the sign bit into the low-order bit, then
4937 into the high-order bit, then sign-extend. We then XOR that value
4939 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4940 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4942 /* We must use a signed type in order to get an arithmetic right shift.
4943 However, we must also avoid introducing accidental overflows, so that
4944 a subsequent call to integer_zerop will work. Hence we must
4945 do the type conversion here. At this point, the constant is either
4946 zero or one, and the conversion to a signed type can never overflow.
4947 We could get an overflow if this conversion is done anywhere else. */
4948 if (TYPE_UNSIGNED (type))
4949 temp = fold_convert (signed_type_for (type), temp);
4951 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4952 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4954 temp = const_binop (BIT_AND_EXPR, temp,
4955 fold_convert (TREE_TYPE (c), mask));
4956 /* If necessary, convert the type back to match the type of C. */
4957 if (TYPE_UNSIGNED (type))
4958 temp = fold_convert (type, temp);
4960 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4963 /* For an expression that has the form
4967 we can drop one of the inner expressions and simplify to
4971 LOC is the location of the resulting expression. OP is the inner
4972 logical operation; the left-hand side in the examples above, while CMPOP
4973 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4974 removing a condition that guards another, as in
4975 (A != NULL && A->...) || A == NULL
4976 which we must not transform. If RHS_ONLY is true, only eliminate the
4977 right-most operand of the inner logical operation. */
4980 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4983 tree type = TREE_TYPE (cmpop);
4984 enum tree_code code = TREE_CODE (cmpop);
4985 enum tree_code truthop_code = TREE_CODE (op);
4986 tree lhs = TREE_OPERAND (op, 0);
4987 tree rhs = TREE_OPERAND (op, 1);
4988 tree orig_lhs = lhs, orig_rhs = rhs;
4989 enum tree_code rhs_code = TREE_CODE (rhs);
4990 enum tree_code lhs_code = TREE_CODE (lhs);
4991 enum tree_code inv_code;
4993 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4996 if (TREE_CODE_CLASS (code) != tcc_comparison)
4999 if (rhs_code == truthop_code)
5001 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5002 if (newrhs != NULL_TREE)
5005 rhs_code = TREE_CODE (rhs);
5008 if (lhs_code == truthop_code && !rhs_only)
5010 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5011 if (newlhs != NULL_TREE)
5014 lhs_code = TREE_CODE (lhs);
5018 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5019 if (inv_code == rhs_code
5020 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5021 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5023 if (!rhs_only && inv_code == lhs_code
5024 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5025 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5027 if (rhs != orig_rhs || lhs != orig_lhs)
5028 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5033 /* Find ways of folding logical expressions of LHS and RHS:
5034 Try to merge two comparisons to the same innermost item.
5035 Look for range tests like "ch >= '0' && ch <= '9'".
5036 Look for combinations of simple terms on machines with expensive branches
5037 and evaluate the RHS unconditionally.
5039 For example, if we have p->a == 2 && p->b == 4 and we can make an
5040 object large enough to span both A and B, we can do this with a comparison
5041 against the object ANDed with the a mask.
5043 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5044 operations to do this with one comparison.
5046 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5047 function and the one above.
5049 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5050 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5052 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5055 We return the simplified tree or 0 if no optimization is possible. */
5058 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5061 /* If this is the "or" of two comparisons, we can do something if
5062 the comparisons are NE_EXPR. If this is the "and", we can do something
5063 if the comparisons are EQ_EXPR. I.e.,
5064 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5066 WANTED_CODE is this operation code. For single bit fields, we can
5067 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5068 comparison for one-bit fields. */
5070 enum tree_code wanted_code;
5071 enum tree_code lcode, rcode;
5072 tree ll_arg, lr_arg, rl_arg, rr_arg;
5073 tree ll_inner, lr_inner, rl_inner, rr_inner;
5074 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5075 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5076 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5077 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5078 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5079 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5080 enum machine_mode lnmode, rnmode;
5081 tree ll_mask, lr_mask, rl_mask, rr_mask;
5082 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5083 tree l_const, r_const;
5084 tree lntype, rntype, result;
5085 HOST_WIDE_INT first_bit, end_bit;
5088 /* Start by getting the comparison codes. Fail if anything is volatile.
5089 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5090 it were surrounded with a NE_EXPR. */
5092 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5095 lcode = TREE_CODE (lhs);
5096 rcode = TREE_CODE (rhs);
5098 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5100 lhs = build2 (NE_EXPR, truth_type, lhs,
5101 build_int_cst (TREE_TYPE (lhs), 0));
5105 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5107 rhs = build2 (NE_EXPR, truth_type, rhs,
5108 build_int_cst (TREE_TYPE (rhs), 0));
5112 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5113 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5116 ll_arg = TREE_OPERAND (lhs, 0);
5117 lr_arg = TREE_OPERAND (lhs, 1);
5118 rl_arg = TREE_OPERAND (rhs, 0);
5119 rr_arg = TREE_OPERAND (rhs, 1);
5121 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5122 if (simple_operand_p (ll_arg)
5123 && simple_operand_p (lr_arg))
5125 if (operand_equal_p (ll_arg, rl_arg, 0)
5126 && operand_equal_p (lr_arg, rr_arg, 0))
5128 result = combine_comparisons (loc, code, lcode, rcode,
5129 truth_type, ll_arg, lr_arg);
5133 else if (operand_equal_p (ll_arg, rr_arg, 0)
5134 && operand_equal_p (lr_arg, rl_arg, 0))
5136 result = combine_comparisons (loc, code, lcode,
5137 swap_tree_comparison (rcode),
5138 truth_type, ll_arg, lr_arg);
5144 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5145 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5147 /* If the RHS can be evaluated unconditionally and its operands are
5148 simple, it wins to evaluate the RHS unconditionally on machines
5149 with expensive branches. In this case, this isn't a comparison
5150 that can be merged. */
5152 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5154 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5155 && simple_operand_p (rl_arg)
5156 && simple_operand_p (rr_arg))
5158 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5159 if (code == TRUTH_OR_EXPR
5160 && lcode == NE_EXPR && integer_zerop (lr_arg)
5161 && rcode == NE_EXPR && integer_zerop (rr_arg)
5162 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5163 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5164 return build2_loc (loc, NE_EXPR, truth_type,
5165 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5167 build_int_cst (TREE_TYPE (ll_arg), 0));
5169 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5170 if (code == TRUTH_AND_EXPR
5171 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5172 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5173 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5174 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5175 return build2_loc (loc, EQ_EXPR, truth_type,
5176 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5178 build_int_cst (TREE_TYPE (ll_arg), 0));
5181 /* See if the comparisons can be merged. Then get all the parameters for
5184 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5185 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5189 ll_inner = decode_field_reference (loc, ll_arg,
5190 &ll_bitsize, &ll_bitpos, &ll_mode,
5191 &ll_unsignedp, &volatilep, &ll_mask,
5193 lr_inner = decode_field_reference (loc, lr_arg,
5194 &lr_bitsize, &lr_bitpos, &lr_mode,
5195 &lr_unsignedp, &volatilep, &lr_mask,
5197 rl_inner = decode_field_reference (loc, rl_arg,
5198 &rl_bitsize, &rl_bitpos, &rl_mode,
5199 &rl_unsignedp, &volatilep, &rl_mask,
5201 rr_inner = decode_field_reference (loc, rr_arg,
5202 &rr_bitsize, &rr_bitpos, &rr_mode,
5203 &rr_unsignedp, &volatilep, &rr_mask,
5206 /* It must be true that the inner operation on the lhs of each
5207 comparison must be the same if we are to be able to do anything.
5208 Then see if we have constants. If not, the same must be true for
5210 if (volatilep || ll_inner == 0 || rl_inner == 0
5211 || ! operand_equal_p (ll_inner, rl_inner, 0))
5214 if (TREE_CODE (lr_arg) == INTEGER_CST
5215 && TREE_CODE (rr_arg) == INTEGER_CST)
5216 l_const = lr_arg, r_const = rr_arg;
5217 else if (lr_inner == 0 || rr_inner == 0
5218 || ! operand_equal_p (lr_inner, rr_inner, 0))
5221 l_const = r_const = 0;
5223 /* If either comparison code is not correct for our logical operation,
5224 fail. However, we can convert a one-bit comparison against zero into
5225 the opposite comparison against that bit being set in the field. */
5227 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5228 if (lcode != wanted_code)
5230 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5232 /* Make the left operand unsigned, since we are only interested
5233 in the value of one bit. Otherwise we are doing the wrong
5242 /* This is analogous to the code for l_const above. */
5243 if (rcode != wanted_code)
5245 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5254 /* See if we can find a mode that contains both fields being compared on
5255 the left. If we can't, fail. Otherwise, update all constants and masks
5256 to be relative to a field of that size. */
5257 first_bit = MIN (ll_bitpos, rl_bitpos);
5258 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5259 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5260 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5262 if (lnmode == VOIDmode)
5265 lnbitsize = GET_MODE_BITSIZE (lnmode);
5266 lnbitpos = first_bit & ~ (lnbitsize - 1);
5267 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5268 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5270 if (BYTES_BIG_ENDIAN)
5272 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5273 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5276 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5277 size_int (xll_bitpos));
5278 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5279 size_int (xrl_bitpos));
5283 l_const = fold_convert_loc (loc, lntype, l_const);
5284 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5285 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5286 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5287 fold_build1_loc (loc, BIT_NOT_EXPR,
5290 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5292 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5297 r_const = fold_convert_loc (loc, lntype, r_const);
5298 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5299 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5300 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5301 fold_build1_loc (loc, BIT_NOT_EXPR,
5304 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5306 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5310 /* If the right sides are not constant, do the same for it. Also,
5311 disallow this optimization if a size or signedness mismatch occurs
5312 between the left and right sides. */
5315 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5316 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5317 /* Make sure the two fields on the right
5318 correspond to the left without being swapped. */
5319 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5322 first_bit = MIN (lr_bitpos, rr_bitpos);
5323 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5324 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5325 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5327 if (rnmode == VOIDmode)
5330 rnbitsize = GET_MODE_BITSIZE (rnmode);
5331 rnbitpos = first_bit & ~ (rnbitsize - 1);
5332 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5333 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5335 if (BYTES_BIG_ENDIAN)
5337 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5338 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5341 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5343 size_int (xlr_bitpos));
5344 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5346 size_int (xrr_bitpos));
5348 /* Make a mask that corresponds to both fields being compared.
5349 Do this for both items being compared. If the operands are the
5350 same size and the bits being compared are in the same position
5351 then we can do this by masking both and comparing the masked
5353 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5354 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5355 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5357 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5358 ll_unsignedp || rl_unsignedp);
5359 if (! all_ones_mask_p (ll_mask, lnbitsize))
5360 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5362 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5363 lr_unsignedp || rr_unsignedp);
5364 if (! all_ones_mask_p (lr_mask, rnbitsize))
5365 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5367 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5370 /* There is still another way we can do something: If both pairs of
5371 fields being compared are adjacent, we may be able to make a wider
5372 field containing them both.
5374 Note that we still must mask the lhs/rhs expressions. Furthermore,
5375 the mask must be shifted to account for the shift done by
5376 make_bit_field_ref. */
5377 if ((ll_bitsize + ll_bitpos == rl_bitpos
5378 && lr_bitsize + lr_bitpos == rr_bitpos)
5379 || (ll_bitpos == rl_bitpos + rl_bitsize
5380 && lr_bitpos == rr_bitpos + rr_bitsize))
5384 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5385 ll_bitsize + rl_bitsize,
5386 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5387 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5388 lr_bitsize + rr_bitsize,
5389 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5391 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5392 size_int (MIN (xll_bitpos, xrl_bitpos)));
5393 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5394 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5396 /* Convert to the smaller type before masking out unwanted bits. */
5398 if (lntype != rntype)
5400 if (lnbitsize > rnbitsize)
5402 lhs = fold_convert_loc (loc, rntype, lhs);
5403 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5406 else if (lnbitsize < rnbitsize)
5408 rhs = fold_convert_loc (loc, lntype, rhs);
5409 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5414 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5415 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5417 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5418 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5420 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5426 /* Handle the case of comparisons with constants. If there is something in
5427 common between the masks, those bits of the constants must be the same.
5428 If not, the condition is always false. Test for this to avoid generating
5429 incorrect code below. */
5430 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5431 if (! integer_zerop (result)
5432 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5433 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5435 if (wanted_code == NE_EXPR)
5437 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5438 return constant_boolean_node (true, truth_type);
5442 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5443 return constant_boolean_node (false, truth_type);
5447 /* Construct the expression we will return. First get the component
5448 reference we will make. Unless the mask is all ones the width of
5449 that field, perform the mask operation. Then compare with the
5451 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5452 ll_unsignedp || rl_unsignedp);
5454 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5455 if (! all_ones_mask_p (ll_mask, lnbitsize))
5456 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5458 return build2_loc (loc, wanted_code, truth_type, result,
5459 const_binop (BIT_IOR_EXPR, l_const, r_const));
5462 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5466 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5470 enum tree_code op_code;
5473 int consts_equal, consts_lt;
5476 STRIP_SIGN_NOPS (arg0);
5478 op_code = TREE_CODE (arg0);
5479 minmax_const = TREE_OPERAND (arg0, 1);
5480 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5481 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5482 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5483 inner = TREE_OPERAND (arg0, 0);
5485 /* If something does not permit us to optimize, return the original tree. */
5486 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5487 || TREE_CODE (comp_const) != INTEGER_CST
5488 || TREE_OVERFLOW (comp_const)
5489 || TREE_CODE (minmax_const) != INTEGER_CST
5490 || TREE_OVERFLOW (minmax_const))
5493 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5494 and GT_EXPR, doing the rest with recursive calls using logical
5498 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5501 = optimize_minmax_comparison (loc,
5502 invert_tree_comparison (code, false),
5505 return invert_truthvalue_loc (loc, tem);
5511 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5512 optimize_minmax_comparison
5513 (loc, EQ_EXPR, type, arg0, comp_const),
5514 optimize_minmax_comparison
5515 (loc, GT_EXPR, type, arg0, comp_const));
5518 if (op_code == MAX_EXPR && consts_equal)
5519 /* MAX (X, 0) == 0 -> X <= 0 */
5520 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5522 else if (op_code == MAX_EXPR && consts_lt)
5523 /* MAX (X, 0) == 5 -> X == 5 */
5524 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5526 else if (op_code == MAX_EXPR)
5527 /* MAX (X, 0) == -1 -> false */
5528 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5530 else if (consts_equal)
5531 /* MIN (X, 0) == 0 -> X >= 0 */
5532 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5535 /* MIN (X, 0) == 5 -> false */
5536 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5539 /* MIN (X, 0) == -1 -> X == -1 */
5540 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5543 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5544 /* MAX (X, 0) > 0 -> X > 0
5545 MAX (X, 0) > 5 -> X > 5 */
5546 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5548 else if (op_code == MAX_EXPR)
5549 /* MAX (X, 0) > -1 -> true */
5550 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5552 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5553 /* MIN (X, 0) > 0 -> false
5554 MIN (X, 0) > 5 -> false */
5555 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5558 /* MIN (X, 0) > -1 -> X > -1 */
5559 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5566 /* T is an integer expression that is being multiplied, divided, or taken a
5567 modulus (CODE says which and what kind of divide or modulus) by a
5568 constant C. See if we can eliminate that operation by folding it with
5569 other operations already in T. WIDE_TYPE, if non-null, is a type that
5570 should be used for the computation if wider than our type.
5572 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5573 (X * 2) + (Y * 4). We must, however, be assured that either the original
5574 expression would not overflow or that overflow is undefined for the type
5575 in the language in question.
5577 If we return a non-null expression, it is an equivalent form of the
5578 original computation, but need not be in the original type.
5580 We set *STRICT_OVERFLOW_P to true if the return values depends on
5581 signed overflow being undefined. Otherwise we do not change
5582 *STRICT_OVERFLOW_P. */
5585 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5586 bool *strict_overflow_p)
5588 /* To avoid exponential search depth, refuse to allow recursion past
5589 three levels. Beyond that (1) it's highly unlikely that we'll find
5590 something interesting and (2) we've probably processed it before
5591 when we built the inner expression. */
5600 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5607 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5608 bool *strict_overflow_p)
5610 tree type = TREE_TYPE (t);
5611 enum tree_code tcode = TREE_CODE (t);
5612 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5613 > GET_MODE_SIZE (TYPE_MODE (type)))
5614 ? wide_type : type);
5616 int same_p = tcode == code;
5617 tree op0 = NULL_TREE, op1 = NULL_TREE;
5618 bool sub_strict_overflow_p;
5620 /* Don't deal with constants of zero here; they confuse the code below. */
5621 if (integer_zerop (c))
5624 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5625 op0 = TREE_OPERAND (t, 0);
5627 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5628 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5630 /* Note that we need not handle conditional operations here since fold
5631 already handles those cases. So just do arithmetic here. */
5635 /* For a constant, we can always simplify if we are a multiply
5636 or (for divide and modulus) if it is a multiple of our constant. */
5637 if (code == MULT_EXPR
5638 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5639 return const_binop (code, fold_convert (ctype, t),
5640 fold_convert (ctype, c));
5643 CASE_CONVERT: case NON_LVALUE_EXPR:
5644 /* If op0 is an expression ... */
5645 if ((COMPARISON_CLASS_P (op0)
5646 || UNARY_CLASS_P (op0)
5647 || BINARY_CLASS_P (op0)
5648 || VL_EXP_CLASS_P (op0)
5649 || EXPRESSION_CLASS_P (op0))
5650 /* ... and has wrapping overflow, and its type is smaller
5651 than ctype, then we cannot pass through as widening. */
5652 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5653 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5654 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5655 && (TYPE_PRECISION (ctype)
5656 > TYPE_PRECISION (TREE_TYPE (op0))))
5657 /* ... or this is a truncation (t is narrower than op0),
5658 then we cannot pass through this narrowing. */
5659 || (TYPE_PRECISION (type)
5660 < TYPE_PRECISION (TREE_TYPE (op0)))
5661 /* ... or signedness changes for division or modulus,
5662 then we cannot pass through this conversion. */
5663 || (code != MULT_EXPR
5664 && (TYPE_UNSIGNED (ctype)
5665 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5666 /* ... or has undefined overflow while the converted to
5667 type has not, we cannot do the operation in the inner type
5668 as that would introduce undefined overflow. */
5669 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5670 && !TYPE_OVERFLOW_UNDEFINED (type))))
5673 /* Pass the constant down and see if we can make a simplification. If
5674 we can, replace this expression with the inner simplification for
5675 possible later conversion to our or some other type. */
5676 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5677 && TREE_CODE (t2) == INTEGER_CST
5678 && !TREE_OVERFLOW (t2)
5679 && (0 != (t1 = extract_muldiv (op0, t2, code,
5681 ? ctype : NULL_TREE,
5682 strict_overflow_p))))
5687 /* If widening the type changes it from signed to unsigned, then we
5688 must avoid building ABS_EXPR itself as unsigned. */
5689 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5691 tree cstype = (*signed_type_for) (ctype);
5692 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5695 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5696 return fold_convert (ctype, t1);
5700 /* If the constant is negative, we cannot simplify this. */
5701 if (tree_int_cst_sgn (c) == -1)
5705 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5707 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5710 case MIN_EXPR: case MAX_EXPR:
5711 /* If widening the type changes the signedness, then we can't perform
5712 this optimization as that changes the result. */
5713 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5716 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5717 sub_strict_overflow_p = false;
5718 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5719 &sub_strict_overflow_p)) != 0
5720 && (t2 = extract_muldiv (op1, c, code, wide_type,
5721 &sub_strict_overflow_p)) != 0)
5723 if (tree_int_cst_sgn (c) < 0)
5724 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5725 if (sub_strict_overflow_p)
5726 *strict_overflow_p = true;
5727 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5728 fold_convert (ctype, t2));
5732 case LSHIFT_EXPR: case RSHIFT_EXPR:
5733 /* If the second operand is constant, this is a multiplication
5734 or floor division, by a power of two, so we can treat it that
5735 way unless the multiplier or divisor overflows. Signed
5736 left-shift overflow is implementation-defined rather than
5737 undefined in C90, so do not convert signed left shift into
5739 if (TREE_CODE (op1) == INTEGER_CST
5740 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5741 /* const_binop may not detect overflow correctly,
5742 so check for it explicitly here. */
5743 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5744 && TREE_INT_CST_HIGH (op1) == 0
5745 && 0 != (t1 = fold_convert (ctype,
5746 const_binop (LSHIFT_EXPR,
5749 && !TREE_OVERFLOW (t1))
5750 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5751 ? MULT_EXPR : FLOOR_DIV_EXPR,
5753 fold_convert (ctype, op0),
5755 c, code, wide_type, strict_overflow_p);
5758 case PLUS_EXPR: case MINUS_EXPR:
5759 /* See if we can eliminate the operation on both sides. If we can, we
5760 can return a new PLUS or MINUS. If we can't, the only remaining
5761 cases where we can do anything are if the second operand is a
5763 sub_strict_overflow_p = false;
5764 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5765 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5766 if (t1 != 0 && t2 != 0
5767 && (code == MULT_EXPR
5768 /* If not multiplication, we can only do this if both operands
5769 are divisible by c. */
5770 || (multiple_of_p (ctype, op0, c)
5771 && multiple_of_p (ctype, op1, c))))
5773 if (sub_strict_overflow_p)
5774 *strict_overflow_p = true;
5775 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5776 fold_convert (ctype, t2));
5779 /* If this was a subtraction, negate OP1 and set it to be an addition.
5780 This simplifies the logic below. */
5781 if (tcode == MINUS_EXPR)
5783 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5784 /* If OP1 was not easily negatable, the constant may be OP0. */
5785 if (TREE_CODE (op0) == INTEGER_CST)
5796 if (TREE_CODE (op1) != INTEGER_CST)
5799 /* If either OP1 or C are negative, this optimization is not safe for
5800 some of the division and remainder types while for others we need
5801 to change the code. */
5802 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5804 if (code == CEIL_DIV_EXPR)
5805 code = FLOOR_DIV_EXPR;
5806 else if (code == FLOOR_DIV_EXPR)
5807 code = CEIL_DIV_EXPR;
5808 else if (code != MULT_EXPR
5809 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5813 /* If it's a multiply or a division/modulus operation of a multiple
5814 of our constant, do the operation and verify it doesn't overflow. */
5815 if (code == MULT_EXPR
5816 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5818 op1 = const_binop (code, fold_convert (ctype, op1),
5819 fold_convert (ctype, c));
5820 /* We allow the constant to overflow with wrapping semantics. */
5822 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5828 /* If we have an unsigned type is not a sizetype, we cannot widen
5829 the operation since it will change the result if the original
5830 computation overflowed. */
5831 if (TYPE_UNSIGNED (ctype)
5832 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5836 /* If we were able to eliminate our operation from the first side,
5837 apply our operation to the second side and reform the PLUS. */
5838 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5839 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5841 /* The last case is if we are a multiply. In that case, we can
5842 apply the distributive law to commute the multiply and addition
5843 if the multiplication of the constants doesn't overflow. */
5844 if (code == MULT_EXPR)
5845 return fold_build2 (tcode, ctype,
5846 fold_build2 (code, ctype,
5847 fold_convert (ctype, op0),
5848 fold_convert (ctype, c)),
5854 /* We have a special case here if we are doing something like
5855 (C * 8) % 4 since we know that's zero. */
5856 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5857 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5858 /* If the multiplication can overflow we cannot optimize this.
5859 ??? Until we can properly mark individual operations as
5860 not overflowing we need to treat sizetype special here as
5861 stor-layout relies on this opimization to make
5862 DECL_FIELD_BIT_OFFSET always a constant. */
5863 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5864 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5865 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5866 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5867 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5869 *strict_overflow_p = true;
5870 return omit_one_operand (type, integer_zero_node, op0);
5873 /* ... fall through ... */
5875 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5876 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5877 /* If we can extract our operation from the LHS, do so and return a
5878 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5879 do something only if the second operand is a constant. */
5881 && (t1 = extract_muldiv (op0, c, code, wide_type,
5882 strict_overflow_p)) != 0)
5883 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5884 fold_convert (ctype, op1));
5885 else if (tcode == MULT_EXPR && code == MULT_EXPR
5886 && (t1 = extract_muldiv (op1, c, code, wide_type,
5887 strict_overflow_p)) != 0)
5888 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5889 fold_convert (ctype, t1));
5890 else if (TREE_CODE (op1) != INTEGER_CST)
5893 /* If these are the same operation types, we can associate them
5894 assuming no overflow. */
5899 mul = double_int_mul_with_sign
5901 (tree_to_double_int (op1),
5902 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5904 (tree_to_double_int (c),
5905 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5906 false, &overflow_p);
5907 overflow_p = (((!TYPE_UNSIGNED (ctype)
5908 || (TREE_CODE (ctype) == INTEGER_TYPE
5909 && TYPE_IS_SIZETYPE (ctype)))
5911 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5912 if (!double_int_fits_to_tree_p (ctype, mul)
5913 && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
5914 || !TYPE_UNSIGNED (ctype)
5915 || (TREE_CODE (ctype) == INTEGER_TYPE
5916 && TYPE_IS_SIZETYPE (ctype))))
5919 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5920 double_int_to_tree (ctype, mul));
5923 /* If these operations "cancel" each other, we have the main
5924 optimizations of this pass, which occur when either constant is a
5925 multiple of the other, in which case we replace this with either an
5926 operation or CODE or TCODE.
5928 If we have an unsigned type that is not a sizetype, we cannot do
5929 this since it will change the result if the original computation
5931 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5932 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5933 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5934 || (tcode == MULT_EXPR
5935 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5936 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5937 && code != MULT_EXPR)))
5939 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5941 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5942 *strict_overflow_p = true;
5943 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5944 fold_convert (ctype,
5945 const_binop (TRUNC_DIV_EXPR,
5948 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5950 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5951 *strict_overflow_p = true;
5952 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5953 fold_convert (ctype,
5954 const_binop (TRUNC_DIV_EXPR,
5967 /* Return a node which has the indicated constant VALUE (either 0 or
5968 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5969 and is of the indicated TYPE. */
5972 constant_boolean_node (bool value, tree type)
5974 if (type == integer_type_node)
5975 return value ? integer_one_node : integer_zero_node;
5976 else if (type == boolean_type_node)
5977 return value ? boolean_true_node : boolean_false_node;
5978 else if (TREE_CODE (type) == VECTOR_TYPE)
5979 return build_vector_from_val (type,
5980 build_int_cst (TREE_TYPE (type),
5983 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5987 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5988 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5989 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5990 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5991 COND is the first argument to CODE; otherwise (as in the example
5992 given here), it is the second argument. TYPE is the type of the
5993 original expression. Return NULL_TREE if no simplification is
5997 fold_binary_op_with_conditional_arg (location_t loc,
5998 enum tree_code code,
5999 tree type, tree op0, tree op1,
6000 tree cond, tree arg, int cond_first_p)
6002 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6003 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6004 tree test, true_value, false_value;
6005 tree lhs = NULL_TREE;
6006 tree rhs = NULL_TREE;
6008 if (TREE_CODE (cond) == COND_EXPR)
6010 test = TREE_OPERAND (cond, 0);
6011 true_value = TREE_OPERAND (cond, 1);
6012 false_value = TREE_OPERAND (cond, 2);
6013 /* If this operand throws an expression, then it does not make
6014 sense to try to perform a logical or arithmetic operation
6016 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6018 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6023 tree testtype = TREE_TYPE (cond);
6025 true_value = constant_boolean_node (true, testtype);
6026 false_value = constant_boolean_node (false, testtype);
6029 /* This transformation is only worthwhile if we don't have to wrap ARG
6030 in a SAVE_EXPR and the operation can be simplified on at least one
6031 of the branches once its pushed inside the COND_EXPR. */
6032 if (!TREE_CONSTANT (arg)
6033 && (TREE_SIDE_EFFECTS (arg)
6034 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6037 arg = fold_convert_loc (loc, arg_type, arg);
6040 true_value = fold_convert_loc (loc, cond_type, true_value);
6042 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6044 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6048 false_value = fold_convert_loc (loc, cond_type, false_value);
6050 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6052 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6055 /* Check that we have simplified at least one of the branches. */
6056 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6059 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6063 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6065 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6066 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6067 ADDEND is the same as X.
6069 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6070 and finite. The problematic cases are when X is zero, and its mode
6071 has signed zeros. In the case of rounding towards -infinity,
6072 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6073 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6076 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6078 if (!real_zerop (addend))
6081 /* Don't allow the fold with -fsignaling-nans. */
6082 if (HONOR_SNANS (TYPE_MODE (type)))
6085 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6086 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6089 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6090 if (TREE_CODE (addend) == REAL_CST
6091 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6094 /* The mode has signed zeros, and we have to honor their sign.
6095 In this situation, there is only one case we can return true for.
6096 X - 0 is the same as X unless rounding towards -infinity is
6098 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6101 /* Subroutine of fold() that checks comparisons of built-in math
6102 functions against real constants.
6104 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6105 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6106 is the type of the result and ARG0 and ARG1 are the operands of the
6107 comparison. ARG1 must be a TREE_REAL_CST.
6109 The function returns the constant folded tree if a simplification
6110 can be made, and NULL_TREE otherwise. */
6113 fold_mathfn_compare (location_t loc,
6114 enum built_in_function fcode, enum tree_code code,
6115 tree type, tree arg0, tree arg1)
6119 if (BUILTIN_SQRT_P (fcode))
6121 tree arg = CALL_EXPR_ARG (arg0, 0);
6122 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6124 c = TREE_REAL_CST (arg1);
6125 if (REAL_VALUE_NEGATIVE (c))
6127 /* sqrt(x) < y is always false, if y is negative. */
6128 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6129 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6131 /* sqrt(x) > y is always true, if y is negative and we
6132 don't care about NaNs, i.e. negative values of x. */
6133 if (code == NE_EXPR || !HONOR_NANS (mode))
6134 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6136 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6137 return fold_build2_loc (loc, GE_EXPR, type, arg,
6138 build_real (TREE_TYPE (arg), dconst0));
6140 else if (code == GT_EXPR || code == GE_EXPR)
6144 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6145 real_convert (&c2, mode, &c2);
6147 if (REAL_VALUE_ISINF (c2))
6149 /* sqrt(x) > y is x == +Inf, when y is very large. */
6150 if (HONOR_INFINITIES (mode))
6151 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6152 build_real (TREE_TYPE (arg), c2));
6154 /* sqrt(x) > y is always false, when y is very large
6155 and we don't care about infinities. */
6156 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6159 /* sqrt(x) > c is the same as x > c*c. */
6160 return fold_build2_loc (loc, code, type, arg,
6161 build_real (TREE_TYPE (arg), c2));
6163 else if (code == LT_EXPR || code == LE_EXPR)
6167 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6168 real_convert (&c2, mode, &c2);
6170 if (REAL_VALUE_ISINF (c2))
6172 /* sqrt(x) < y is always true, when y is a very large
6173 value and we don't care about NaNs or Infinities. */
6174 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6175 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6177 /* sqrt(x) < y is x != +Inf when y is very large and we
6178 don't care about NaNs. */
6179 if (! HONOR_NANS (mode))
6180 return fold_build2_loc (loc, NE_EXPR, type, arg,
6181 build_real (TREE_TYPE (arg), c2));
6183 /* sqrt(x) < y is x >= 0 when y is very large and we
6184 don't care about Infinities. */
6185 if (! HONOR_INFINITIES (mode))
6186 return fold_build2_loc (loc, GE_EXPR, type, arg,
6187 build_real (TREE_TYPE (arg), dconst0));
6189 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6190 arg = save_expr (arg);
6191 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6192 fold_build2_loc (loc, GE_EXPR, type, arg,
6193 build_real (TREE_TYPE (arg),
6195 fold_build2_loc (loc, NE_EXPR, type, arg,
6196 build_real (TREE_TYPE (arg),
6200 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6201 if (! HONOR_NANS (mode))
6202 return fold_build2_loc (loc, code, type, arg,
6203 build_real (TREE_TYPE (arg), c2));
6205 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6206 arg = save_expr (arg);
6207 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6208 fold_build2_loc (loc, GE_EXPR, type, arg,
6209 build_real (TREE_TYPE (arg),
6211 fold_build2_loc (loc, code, type, arg,
6212 build_real (TREE_TYPE (arg),
6220 /* Subroutine of fold() that optimizes comparisons against Infinities,
6221 either +Inf or -Inf.
6223 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6224 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6225 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6227 The function returns the constant folded tree if a simplification
6228 can be made, and NULL_TREE otherwise. */
6231 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6232 tree arg0, tree arg1)
6234 enum machine_mode mode;
6235 REAL_VALUE_TYPE max;
6239 mode = TYPE_MODE (TREE_TYPE (arg0));
6241 /* For negative infinity swap the sense of the comparison. */
6242 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6244 code = swap_tree_comparison (code);
6249 /* x > +Inf is always false, if with ignore sNANs. */
6250 if (HONOR_SNANS (mode))
6252 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6255 /* x <= +Inf is always true, if we don't case about NaNs. */
6256 if (! HONOR_NANS (mode))
6257 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6259 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6260 arg0 = save_expr (arg0);
6261 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6265 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6266 real_maxval (&max, neg, mode);
6267 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6268 arg0, build_real (TREE_TYPE (arg0), max));
6271 /* x < +Inf is always equal to x <= DBL_MAX. */
6272 real_maxval (&max, neg, mode);
6273 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6274 arg0, build_real (TREE_TYPE (arg0), max));
6277 /* x != +Inf is always equal to !(x > DBL_MAX). */
6278 real_maxval (&max, neg, mode);
6279 if (! HONOR_NANS (mode))
6280 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6281 arg0, build_real (TREE_TYPE (arg0), max));
6283 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6284 arg0, build_real (TREE_TYPE (arg0), max));
6285 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6294 /* Subroutine of fold() that optimizes comparisons of a division by
6295 a nonzero integer constant against an integer constant, i.e.
6298 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6299 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6300 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6302 The function returns the constant folded tree if a simplification
6303 can be made, and NULL_TREE otherwise. */
6306 fold_div_compare (location_t loc,
6307 enum tree_code code, tree type, tree arg0, tree arg1)
6309 tree prod, tmp, hi, lo;
6310 tree arg00 = TREE_OPERAND (arg0, 0);
6311 tree arg01 = TREE_OPERAND (arg0, 1);
6313 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6317 /* We have to do this the hard way to detect unsigned overflow.
6318 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6319 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6320 TREE_INT_CST_HIGH (arg01),
6321 TREE_INT_CST_LOW (arg1),
6322 TREE_INT_CST_HIGH (arg1),
6323 &val.low, &val.high, unsigned_p);
6324 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6325 neg_overflow = false;
6329 tmp = int_const_binop (MINUS_EXPR, arg01,
6330 build_int_cst (TREE_TYPE (arg01), 1));
6333 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6334 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6335 TREE_INT_CST_HIGH (prod),
6336 TREE_INT_CST_LOW (tmp),
6337 TREE_INT_CST_HIGH (tmp),
6338 &val.low, &val.high, unsigned_p);
6339 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6340 -1, overflow | TREE_OVERFLOW (prod));
6342 else if (tree_int_cst_sgn (arg01) >= 0)
6344 tmp = int_const_binop (MINUS_EXPR, arg01,
6345 build_int_cst (TREE_TYPE (arg01), 1));
6346 switch (tree_int_cst_sgn (arg1))
6349 neg_overflow = true;
6350 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6355 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6360 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6370 /* A negative divisor reverses the relational operators. */
6371 code = swap_tree_comparison (code);
6373 tmp = int_const_binop (PLUS_EXPR, arg01,
6374 build_int_cst (TREE_TYPE (arg01), 1));
6375 switch (tree_int_cst_sgn (arg1))
6378 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6383 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6388 neg_overflow = true;
6389 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6401 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6402 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6403 if (TREE_OVERFLOW (hi))
6404 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6405 if (TREE_OVERFLOW (lo))
6406 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6407 return build_range_check (loc, type, arg00, 1, lo, hi);
6410 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6411 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6412 if (TREE_OVERFLOW (hi))
6413 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6414 if (TREE_OVERFLOW (lo))
6415 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6416 return build_range_check (loc, type, arg00, 0, lo, hi);
6419 if (TREE_OVERFLOW (lo))
6421 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6422 return omit_one_operand_loc (loc, type, tmp, arg00);
6424 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6427 if (TREE_OVERFLOW (hi))
6429 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6430 return omit_one_operand_loc (loc, type, tmp, arg00);
6432 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6435 if (TREE_OVERFLOW (hi))
6437 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6438 return omit_one_operand_loc (loc, type, tmp, arg00);
6440 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6443 if (TREE_OVERFLOW (lo))
6445 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6446 return omit_one_operand_loc (loc, type, tmp, arg00);
6448 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6458 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6459 equality/inequality test, then return a simplified form of the test
6460 using a sign testing. Otherwise return NULL. TYPE is the desired
6464 fold_single_bit_test_into_sign_test (location_t loc,
6465 enum tree_code code, tree arg0, tree arg1,
6468 /* If this is testing a single bit, we can optimize the test. */
6469 if ((code == NE_EXPR || code == EQ_EXPR)
6470 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6471 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6473 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6474 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6475 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6477 if (arg00 != NULL_TREE
6478 /* This is only a win if casting to a signed type is cheap,
6479 i.e. when arg00's type is not a partial mode. */
6480 && TYPE_PRECISION (TREE_TYPE (arg00))
6481 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6483 tree stype = signed_type_for (TREE_TYPE (arg00));
6484 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6486 fold_convert_loc (loc, stype, arg00),
6487 build_int_cst (stype, 0));
6494 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6495 equality/inequality test, then return a simplified form of
6496 the test using shifts and logical operations. Otherwise return
6497 NULL. TYPE is the desired result type. */
6500 fold_single_bit_test (location_t loc, enum tree_code code,
6501 tree arg0, tree arg1, tree result_type)
6503 /* If this is testing a single bit, we can optimize the test. */
6504 if ((code == NE_EXPR || code == EQ_EXPR)
6505 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6506 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6508 tree inner = TREE_OPERAND (arg0, 0);
6509 tree type = TREE_TYPE (arg0);
6510 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6511 enum machine_mode operand_mode = TYPE_MODE (type);
6513 tree signed_type, unsigned_type, intermediate_type;
6516 /* First, see if we can fold the single bit test into a sign-bit
6518 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6523 /* Otherwise we have (A & C) != 0 where C is a single bit,
6524 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6525 Similarly for (A & C) == 0. */
6527 /* If INNER is a right shift of a constant and it plus BITNUM does
6528 not overflow, adjust BITNUM and INNER. */
6529 if (TREE_CODE (inner) == RSHIFT_EXPR
6530 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6531 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6532 && bitnum < TYPE_PRECISION (type)
6533 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6534 bitnum - TYPE_PRECISION (type)))
6536 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6537 inner = TREE_OPERAND (inner, 0);
6540 /* If we are going to be able to omit the AND below, we must do our
6541 operations as unsigned. If we must use the AND, we have a choice.
6542 Normally unsigned is faster, but for some machines signed is. */
6543 #ifdef LOAD_EXTEND_OP
6544 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6545 && !flag_syntax_only) ? 0 : 1;
6550 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6551 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6552 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6553 inner = fold_convert_loc (loc, intermediate_type, inner);
6556 inner = build2 (RSHIFT_EXPR, intermediate_type,
6557 inner, size_int (bitnum));
6559 one = build_int_cst (intermediate_type, 1);
6561 if (code == EQ_EXPR)
6562 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6564 /* Put the AND last so it can combine with more things. */
6565 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6567 /* Make sure to return the proper type. */
6568 inner = fold_convert_loc (loc, result_type, inner);
6575 /* Check whether we are allowed to reorder operands arg0 and arg1,
6576 such that the evaluation of arg1 occurs before arg0. */
6579 reorder_operands_p (const_tree arg0, const_tree arg1)
6581 if (! flag_evaluation_order)
6583 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6585 return ! TREE_SIDE_EFFECTS (arg0)
6586 && ! TREE_SIDE_EFFECTS (arg1);
6589 /* Test whether it is preferable two swap two operands, ARG0 and
6590 ARG1, for example because ARG0 is an integer constant and ARG1
6591 isn't. If REORDER is true, only recommend swapping if we can
6592 evaluate the operands in reverse order. */
6595 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6597 STRIP_SIGN_NOPS (arg0);
6598 STRIP_SIGN_NOPS (arg1);
6600 if (TREE_CODE (arg1) == INTEGER_CST)
6602 if (TREE_CODE (arg0) == INTEGER_CST)
6605 if (TREE_CODE (arg1) == REAL_CST)
6607 if (TREE_CODE (arg0) == REAL_CST)
6610 if (TREE_CODE (arg1) == FIXED_CST)
6612 if (TREE_CODE (arg0) == FIXED_CST)
6615 if (TREE_CODE (arg1) == COMPLEX_CST)
6617 if (TREE_CODE (arg0) == COMPLEX_CST)
6620 if (TREE_CONSTANT (arg1))
6622 if (TREE_CONSTANT (arg0))
6625 if (optimize_function_for_size_p (cfun))
6628 if (reorder && flag_evaluation_order
6629 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6632 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6633 for commutative and comparison operators. Ensuring a canonical
6634 form allows the optimizers to find additional redundancies without
6635 having to explicitly check for both orderings. */
6636 if (TREE_CODE (arg0) == SSA_NAME
6637 && TREE_CODE (arg1) == SSA_NAME
6638 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6641 /* Put SSA_NAMEs last. */
6642 if (TREE_CODE (arg1) == SSA_NAME)
6644 if (TREE_CODE (arg0) == SSA_NAME)
6647 /* Put variables last. */
6656 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6657 ARG0 is extended to a wider type. */
6660 fold_widened_comparison (location_t loc, enum tree_code code,
6661 tree type, tree arg0, tree arg1)
6663 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6665 tree shorter_type, outer_type;
6669 if (arg0_unw == arg0)
6671 shorter_type = TREE_TYPE (arg0_unw);
6673 #ifdef HAVE_canonicalize_funcptr_for_compare
6674 /* Disable this optimization if we're casting a function pointer
6675 type on targets that require function pointer canonicalization. */
6676 if (HAVE_canonicalize_funcptr_for_compare
6677 && TREE_CODE (shorter_type) == POINTER_TYPE
6678 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6682 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6685 arg1_unw = get_unwidened (arg1, NULL_TREE);
6687 /* If possible, express the comparison in the shorter mode. */
6688 if ((code == EQ_EXPR || code == NE_EXPR
6689 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6690 && (TREE_TYPE (arg1_unw) == shorter_type
6691 || ((TYPE_PRECISION (shorter_type)
6692 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6693 && (TYPE_UNSIGNED (shorter_type)
6694 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6695 || (TREE_CODE (arg1_unw) == INTEGER_CST
6696 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6697 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6698 && int_fits_type_p (arg1_unw, shorter_type))))
6699 return fold_build2_loc (loc, code, type, arg0_unw,
6700 fold_convert_loc (loc, shorter_type, arg1_unw));
6702 if (TREE_CODE (arg1_unw) != INTEGER_CST
6703 || TREE_CODE (shorter_type) != INTEGER_TYPE
6704 || !int_fits_type_p (arg1_unw, shorter_type))
6707 /* If we are comparing with the integer that does not fit into the range
6708 of the shorter type, the result is known. */
6709 outer_type = TREE_TYPE (arg1_unw);
6710 min = lower_bound_in_type (outer_type, shorter_type);
6711 max = upper_bound_in_type (outer_type, shorter_type);
6713 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6715 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6722 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6727 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6733 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6735 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6740 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6742 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6751 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6752 ARG0 just the signedness is changed. */
6755 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6756 tree arg0, tree arg1)
6759 tree inner_type, outer_type;
6761 if (!CONVERT_EXPR_P (arg0))
6764 outer_type = TREE_TYPE (arg0);
6765 arg0_inner = TREE_OPERAND (arg0, 0);
6766 inner_type = TREE_TYPE (arg0_inner);
6768 #ifdef HAVE_canonicalize_funcptr_for_compare
6769 /* Disable this optimization if we're casting a function pointer
6770 type on targets that require function pointer canonicalization. */
6771 if (HAVE_canonicalize_funcptr_for_compare
6772 && TREE_CODE (inner_type) == POINTER_TYPE
6773 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6777 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6780 if (TREE_CODE (arg1) != INTEGER_CST
6781 && !(CONVERT_EXPR_P (arg1)
6782 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6785 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6786 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6791 if (TREE_CODE (arg1) == INTEGER_CST)
6792 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6793 0, TREE_OVERFLOW (arg1));
6795 arg1 = fold_convert_loc (loc, inner_type, arg1);
6797 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6800 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6801 step of the array. Reconstructs s and delta in the case of s *
6802 delta being an integer constant (and thus already folded). ADDR is
6803 the address. MULT is the multiplicative expression. If the
6804 function succeeds, the new address expression is returned.
6805 Otherwise NULL_TREE is returned. LOC is the location of the
6806 resulting expression. */
6809 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6811 tree s, delta, step;
6812 tree ref = TREE_OPERAND (addr, 0), pref;
6817 /* Strip the nops that might be added when converting op1 to sizetype. */
6820 /* Canonicalize op1 into a possibly non-constant delta
6821 and an INTEGER_CST s. */
6822 if (TREE_CODE (op1) == MULT_EXPR)
6824 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6829 if (TREE_CODE (arg0) == INTEGER_CST)
6834 else if (TREE_CODE (arg1) == INTEGER_CST)
6842 else if (TREE_CODE (op1) == INTEGER_CST)
6849 /* Simulate we are delta * 1. */
6851 s = integer_one_node;
6854 for (;; ref = TREE_OPERAND (ref, 0))
6856 if (TREE_CODE (ref) == ARRAY_REF)
6860 /* Remember if this was a multi-dimensional array. */
6861 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6864 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6867 itype = TREE_TYPE (domain);
6869 step = array_ref_element_size (ref);
6870 if (TREE_CODE (step) != INTEGER_CST)
6875 if (! tree_int_cst_equal (step, s))
6880 /* Try if delta is a multiple of step. */
6881 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6887 /* Only fold here if we can verify we do not overflow one
6888 dimension of a multi-dimensional array. */
6893 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6894 || !TYPE_MAX_VALUE (domain)
6895 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6898 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6899 fold_convert_loc (loc, itype,
6900 TREE_OPERAND (ref, 1)),
6901 fold_convert_loc (loc, itype, delta));
6903 || TREE_CODE (tmp) != INTEGER_CST
6904 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6910 else if (TREE_CODE (ref) == COMPONENT_REF
6911 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6915 /* Remember if this was a multi-dimensional array. */
6916 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6919 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6922 itype = TREE_TYPE (domain);
6924 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6925 if (TREE_CODE (step) != INTEGER_CST)
6930 if (! tree_int_cst_equal (step, s))
6935 /* Try if delta is a multiple of step. */
6936 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6942 /* Only fold here if we can verify we do not overflow one
6943 dimension of a multi-dimensional array. */
6948 if (!TYPE_MIN_VALUE (domain)
6949 || !TYPE_MAX_VALUE (domain)
6950 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6953 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6954 fold_convert_loc (loc, itype,
6955 TYPE_MIN_VALUE (domain)),
6956 fold_convert_loc (loc, itype, delta));
6957 if (TREE_CODE (tmp) != INTEGER_CST
6958 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6967 if (!handled_component_p (ref))
6971 /* We found the suitable array reference. So copy everything up to it,
6972 and replace the index. */
6974 pref = TREE_OPERAND (addr, 0);
6975 ret = copy_node (pref);
6976 SET_EXPR_LOCATION (ret, loc);
6981 pref = TREE_OPERAND (pref, 0);
6982 TREE_OPERAND (pos, 0) = copy_node (pref);
6983 pos = TREE_OPERAND (pos, 0);
6986 if (TREE_CODE (ref) == ARRAY_REF)
6988 TREE_OPERAND (pos, 1)
6989 = fold_build2_loc (loc, PLUS_EXPR, itype,
6990 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6991 fold_convert_loc (loc, itype, delta));
6992 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6994 else if (TREE_CODE (ref) == COMPONENT_REF)
6996 gcc_assert (ret == pos);
6997 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6999 (loc, PLUS_EXPR, itype,
7000 fold_convert_loc (loc, itype,
7002 (TYPE_DOMAIN (TREE_TYPE (ref)))),
7003 fold_convert_loc (loc, itype, delta)),
7004 NULL_TREE, NULL_TREE);
7005 return build_fold_addr_expr_loc (loc, ret);
7012 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7013 means A >= Y && A != MAX, but in this case we know that
7014 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7017 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7019 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7021 if (TREE_CODE (bound) == LT_EXPR)
7022 a = TREE_OPERAND (bound, 0);
7023 else if (TREE_CODE (bound) == GT_EXPR)
7024 a = TREE_OPERAND (bound, 1);
7028 typea = TREE_TYPE (a);
7029 if (!INTEGRAL_TYPE_P (typea)
7030 && !POINTER_TYPE_P (typea))
7033 if (TREE_CODE (ineq) == LT_EXPR)
7035 a1 = TREE_OPERAND (ineq, 1);
7036 y = TREE_OPERAND (ineq, 0);
7038 else if (TREE_CODE (ineq) == GT_EXPR)
7040 a1 = TREE_OPERAND (ineq, 0);
7041 y = TREE_OPERAND (ineq, 1);
7046 if (TREE_TYPE (a1) != typea)
7049 if (POINTER_TYPE_P (typea))
7051 /* Convert the pointer types into integer before taking the difference. */
7052 tree ta = fold_convert_loc (loc, ssizetype, a);
7053 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7054 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7057 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7059 if (!diff || !integer_onep (diff))
7062 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7065 /* Fold a sum or difference of at least one multiplication.
7066 Returns the folded tree or NULL if no simplification could be made. */
7069 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7070 tree arg0, tree arg1)
7072 tree arg00, arg01, arg10, arg11;
7073 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7075 /* (A * C) +- (B * C) -> (A+-B) * C.
7076 (A * C) +- A -> A * (C+-1).
7077 We are most concerned about the case where C is a constant,
7078 but other combinations show up during loop reduction. Since
7079 it is not difficult, try all four possibilities. */
7081 if (TREE_CODE (arg0) == MULT_EXPR)
7083 arg00 = TREE_OPERAND (arg0, 0);
7084 arg01 = TREE_OPERAND (arg0, 1);
7086 else if (TREE_CODE (arg0) == INTEGER_CST)
7088 arg00 = build_one_cst (type);
7093 /* We cannot generate constant 1 for fract. */
7094 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7097 arg01 = build_one_cst (type);
7099 if (TREE_CODE (arg1) == MULT_EXPR)
7101 arg10 = TREE_OPERAND (arg1, 0);
7102 arg11 = TREE_OPERAND (arg1, 1);
7104 else if (TREE_CODE (arg1) == INTEGER_CST)
7106 arg10 = build_one_cst (type);
7107 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7108 the purpose of this canonicalization. */
7109 if (TREE_INT_CST_HIGH (arg1) == -1
7110 && negate_expr_p (arg1)
7111 && code == PLUS_EXPR)
7113 arg11 = negate_expr (arg1);
7121 /* We cannot generate constant 1 for fract. */
7122 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7125 arg11 = build_one_cst (type);
7129 if (operand_equal_p (arg01, arg11, 0))
7130 same = arg01, alt0 = arg00, alt1 = arg10;
7131 else if (operand_equal_p (arg00, arg10, 0))
7132 same = arg00, alt0 = arg01, alt1 = arg11;
7133 else if (operand_equal_p (arg00, arg11, 0))
7134 same = arg00, alt0 = arg01, alt1 = arg10;
7135 else if (operand_equal_p (arg01, arg10, 0))
7136 same = arg01, alt0 = arg00, alt1 = arg11;
7138 /* No identical multiplicands; see if we can find a common
7139 power-of-two factor in non-power-of-two multiplies. This
7140 can help in multi-dimensional array access. */
7141 else if (host_integerp (arg01, 0)
7142 && host_integerp (arg11, 0))
7144 HOST_WIDE_INT int01, int11, tmp;
7147 int01 = TREE_INT_CST_LOW (arg01);
7148 int11 = TREE_INT_CST_LOW (arg11);
7150 /* Move min of absolute values to int11. */
7151 if (absu_hwi (int01) < absu_hwi (int11))
7153 tmp = int01, int01 = int11, int11 = tmp;
7154 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7161 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7162 /* The remainder should not be a constant, otherwise we
7163 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7164 increased the number of multiplications necessary. */
7165 && TREE_CODE (arg10) != INTEGER_CST)
7167 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7168 build_int_cst (TREE_TYPE (arg00),
7173 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7178 return fold_build2_loc (loc, MULT_EXPR, type,
7179 fold_build2_loc (loc, code, type,
7180 fold_convert_loc (loc, type, alt0),
7181 fold_convert_loc (loc, type, alt1)),
7182 fold_convert_loc (loc, type, same));
7187 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7188 specified by EXPR into the buffer PTR of length LEN bytes.
7189 Return the number of bytes placed in the buffer, or zero
7193 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7195 tree type = TREE_TYPE (expr);
7196 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7197 int byte, offset, word, words;
7198 unsigned char value;
7200 if (total_bytes > len)
7202 words = total_bytes / UNITS_PER_WORD;
7204 for (byte = 0; byte < total_bytes; byte++)
7206 int bitpos = byte * BITS_PER_UNIT;
7207 if (bitpos < HOST_BITS_PER_WIDE_INT)
7208 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7210 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7211 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7213 if (total_bytes > UNITS_PER_WORD)
7215 word = byte / UNITS_PER_WORD;
7216 if (WORDS_BIG_ENDIAN)
7217 word = (words - 1) - word;
7218 offset = word * UNITS_PER_WORD;
7219 if (BYTES_BIG_ENDIAN)
7220 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7222 offset += byte % UNITS_PER_WORD;
7225 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7226 ptr[offset] = value;
7232 /* Subroutine of native_encode_expr. Encode the REAL_CST
7233 specified by EXPR into the buffer PTR of length LEN bytes.
7234 Return the number of bytes placed in the buffer, or zero
7238 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7240 tree type = TREE_TYPE (expr);
7241 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7242 int byte, offset, word, words, bitpos;
7243 unsigned char value;
7245 /* There are always 32 bits in each long, no matter the size of
7246 the hosts long. We handle floating point representations with
7250 if (total_bytes > len)
7252 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7254 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7256 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7257 bitpos += BITS_PER_UNIT)
7259 byte = (bitpos / BITS_PER_UNIT) & 3;
7260 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7262 if (UNITS_PER_WORD < 4)
7264 word = byte / UNITS_PER_WORD;
7265 if (WORDS_BIG_ENDIAN)
7266 word = (words - 1) - word;
7267 offset = word * UNITS_PER_WORD;
7268 if (BYTES_BIG_ENDIAN)
7269 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7271 offset += byte % UNITS_PER_WORD;
7274 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7275 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7280 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7281 specified by EXPR into the buffer PTR of length LEN bytes.
7282 Return the number of bytes placed in the buffer, or zero
7286 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7291 part = TREE_REALPART (expr);
7292 rsize = native_encode_expr (part, ptr, len);
7295 part = TREE_IMAGPART (expr);
7296 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7299 return rsize + isize;
7303 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7304 specified by EXPR into the buffer PTR of length LEN bytes.
7305 Return the number of bytes placed in the buffer, or zero
7309 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7311 int i, size, offset, count;
7312 tree itype, elem, elements;
7315 elements = TREE_VECTOR_CST_ELTS (expr);
7316 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7317 itype = TREE_TYPE (TREE_TYPE (expr));
7318 size = GET_MODE_SIZE (TYPE_MODE (itype));
7319 for (i = 0; i < count; i++)
7323 elem = TREE_VALUE (elements);
7324 elements = TREE_CHAIN (elements);
7331 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7336 if (offset + size > len)
7338 memset (ptr+offset, 0, size);
7346 /* Subroutine of native_encode_expr. Encode the STRING_CST
7347 specified by EXPR into the buffer PTR of length LEN bytes.
7348 Return the number of bytes placed in the buffer, or zero
7352 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7354 tree type = TREE_TYPE (expr);
7355 HOST_WIDE_INT total_bytes;
7357 if (TREE_CODE (type) != ARRAY_TYPE
7358 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7359 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7360 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7362 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7363 if (total_bytes > len)
7365 if (TREE_STRING_LENGTH (expr) < total_bytes)
7367 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7368 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7369 total_bytes - TREE_STRING_LENGTH (expr));
7372 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7377 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7378 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7379 buffer PTR of length LEN bytes. Return the number of bytes
7380 placed in the buffer, or zero upon failure. */
7383 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7385 switch (TREE_CODE (expr))
7388 return native_encode_int (expr, ptr, len);
7391 return native_encode_real (expr, ptr, len);
7394 return native_encode_complex (expr, ptr, len);
7397 return native_encode_vector (expr, ptr, len);
7400 return native_encode_string (expr, ptr, len);
7408 /* Subroutine of native_interpret_expr. Interpret the contents of
7409 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7410 If the buffer cannot be interpreted, return NULL_TREE. */
7413 native_interpret_int (tree type, const unsigned char *ptr, int len)
7415 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7416 int byte, offset, word, words;
7417 unsigned char value;
7420 if (total_bytes > len)
7422 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7425 result = double_int_zero;
7426 words = total_bytes / UNITS_PER_WORD;
7428 for (byte = 0; byte < total_bytes; byte++)
7430 int bitpos = byte * BITS_PER_UNIT;
7431 if (total_bytes > UNITS_PER_WORD)
7433 word = byte / UNITS_PER_WORD;
7434 if (WORDS_BIG_ENDIAN)
7435 word = (words - 1) - word;
7436 offset = word * UNITS_PER_WORD;
7437 if (BYTES_BIG_ENDIAN)
7438 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7440 offset += byte % UNITS_PER_WORD;
7443 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7444 value = ptr[offset];
7446 if (bitpos < HOST_BITS_PER_WIDE_INT)
7447 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7449 result.high |= (unsigned HOST_WIDE_INT) value
7450 << (bitpos - HOST_BITS_PER_WIDE_INT);
7453 return double_int_to_tree (type, result);
7457 /* Subroutine of native_interpret_expr. Interpret the contents of
7458 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7459 If the buffer cannot be interpreted, return NULL_TREE. */
7462 native_interpret_real (tree type, const unsigned char *ptr, int len)
7464 enum machine_mode mode = TYPE_MODE (type);
7465 int total_bytes = GET_MODE_SIZE (mode);
7466 int byte, offset, word, words, bitpos;
7467 unsigned char value;
7468 /* There are always 32 bits in each long, no matter the size of
7469 the hosts long. We handle floating point representations with
7474 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7475 if (total_bytes > len || total_bytes > 24)
7477 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7479 memset (tmp, 0, sizeof (tmp));
7480 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7481 bitpos += BITS_PER_UNIT)
7483 byte = (bitpos / BITS_PER_UNIT) & 3;
7484 if (UNITS_PER_WORD < 4)
7486 word = byte / UNITS_PER_WORD;
7487 if (WORDS_BIG_ENDIAN)
7488 word = (words - 1) - word;
7489 offset = word * UNITS_PER_WORD;
7490 if (BYTES_BIG_ENDIAN)
7491 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7493 offset += byte % UNITS_PER_WORD;
7496 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7497 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7499 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7502 real_from_target (&r, tmp, mode);
7503 return build_real (type, r);
7507 /* Subroutine of native_interpret_expr. Interpret the contents of
7508 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7509 If the buffer cannot be interpreted, return NULL_TREE. */
7512 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7514 tree etype, rpart, ipart;
7517 etype = TREE_TYPE (type);
7518 size = GET_MODE_SIZE (TYPE_MODE (etype));
7521 rpart = native_interpret_expr (etype, ptr, size);
7524 ipart = native_interpret_expr (etype, ptr+size, size);
7527 return build_complex (type, rpart, ipart);
7531 /* Subroutine of native_interpret_expr. Interpret the contents of
7532 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7533 If the buffer cannot be interpreted, return NULL_TREE. */
7536 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7538 tree etype, elem, elements;
7541 etype = TREE_TYPE (type);
7542 size = GET_MODE_SIZE (TYPE_MODE (etype));
7543 count = TYPE_VECTOR_SUBPARTS (type);
7544 if (size * count > len)
7547 elements = NULL_TREE;
7548 for (i = count - 1; i >= 0; i--)
7550 elem = native_interpret_expr (etype, ptr+(i*size), size);
7553 elements = tree_cons (NULL_TREE, elem, elements);
7555 return build_vector (type, elements);
7559 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7560 the buffer PTR of length LEN as a constant of type TYPE. For
7561 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7562 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7563 return NULL_TREE. */
7566 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7568 switch (TREE_CODE (type))
7573 return native_interpret_int (type, ptr, len);
7576 return native_interpret_real (type, ptr, len);
7579 return native_interpret_complex (type, ptr, len);
7582 return native_interpret_vector (type, ptr, len);
7590 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7591 TYPE at compile-time. If we're unable to perform the conversion
7592 return NULL_TREE. */
7595 fold_view_convert_expr (tree type, tree expr)
7597 /* We support up to 512-bit values (for V8DFmode). */
7598 unsigned char buffer[64];
7601 /* Check that the host and target are sane. */
7602 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7605 len = native_encode_expr (expr, buffer, sizeof (buffer));
7609 return native_interpret_expr (type, buffer, len);
7612 /* Build an expression for the address of T. Folds away INDIRECT_REF
7613 to avoid confusing the gimplify process. */
7616 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7618 /* The size of the object is not relevant when talking about its address. */
7619 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7620 t = TREE_OPERAND (t, 0);
7622 if (TREE_CODE (t) == INDIRECT_REF)
7624 t = TREE_OPERAND (t, 0);
7626 if (TREE_TYPE (t) != ptrtype)
7627 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7629 else if (TREE_CODE (t) == MEM_REF
7630 && integer_zerop (TREE_OPERAND (t, 1)))
7631 return TREE_OPERAND (t, 0);
7632 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7634 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7636 if (TREE_TYPE (t) != ptrtype)
7637 t = fold_convert_loc (loc, ptrtype, t);
7640 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7645 /* Build an expression for the address of T. */
7648 build_fold_addr_expr_loc (location_t loc, tree t)
7650 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7652 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7655 /* Fold a unary expression of code CODE and type TYPE with operand
7656 OP0. Return the folded expression if folding is successful.
7657 Otherwise, return NULL_TREE. */
7660 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7664 enum tree_code_class kind = TREE_CODE_CLASS (code);
7666 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7667 && TREE_CODE_LENGTH (code) == 1);
7672 if (CONVERT_EXPR_CODE_P (code)
7673 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7675 /* Don't use STRIP_NOPS, because signedness of argument type
7677 STRIP_SIGN_NOPS (arg0);
7681 /* Strip any conversions that don't change the mode. This
7682 is safe for every expression, except for a comparison
7683 expression because its signedness is derived from its
7686 Note that this is done as an internal manipulation within
7687 the constant folder, in order to find the simplest
7688 representation of the arguments so that their form can be
7689 studied. In any cases, the appropriate type conversions
7690 should be put back in the tree that will get out of the
7696 if (TREE_CODE_CLASS (code) == tcc_unary)
7698 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7699 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7700 fold_build1_loc (loc, code, type,
7701 fold_convert_loc (loc, TREE_TYPE (op0),
7702 TREE_OPERAND (arg0, 1))));
7703 else if (TREE_CODE (arg0) == COND_EXPR)
7705 tree arg01 = TREE_OPERAND (arg0, 1);
7706 tree arg02 = TREE_OPERAND (arg0, 2);
7707 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7708 arg01 = fold_build1_loc (loc, code, type,
7709 fold_convert_loc (loc,
7710 TREE_TYPE (op0), arg01));
7711 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7712 arg02 = fold_build1_loc (loc, code, type,
7713 fold_convert_loc (loc,
7714 TREE_TYPE (op0), arg02));
7715 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7718 /* If this was a conversion, and all we did was to move into
7719 inside the COND_EXPR, bring it back out. But leave it if
7720 it is a conversion from integer to integer and the
7721 result precision is no wider than a word since such a
7722 conversion is cheap and may be optimized away by combine,
7723 while it couldn't if it were outside the COND_EXPR. Then return
7724 so we don't get into an infinite recursion loop taking the
7725 conversion out and then back in. */
7727 if ((CONVERT_EXPR_CODE_P (code)
7728 || code == NON_LVALUE_EXPR)
7729 && TREE_CODE (tem) == COND_EXPR
7730 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7731 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7732 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7733 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7734 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7735 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7736 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7738 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7739 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7740 || flag_syntax_only))
7741 tem = build1_loc (loc, code, type,
7743 TREE_TYPE (TREE_OPERAND
7744 (TREE_OPERAND (tem, 1), 0)),
7745 TREE_OPERAND (tem, 0),
7746 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7747 TREE_OPERAND (TREE_OPERAND (tem, 2),
7756 /* Re-association barriers around constants and other re-association
7757 barriers can be removed. */
7758 if (CONSTANT_CLASS_P (op0)
7759 || TREE_CODE (op0) == PAREN_EXPR)
7760 return fold_convert_loc (loc, type, op0);
7765 case FIX_TRUNC_EXPR:
7766 if (TREE_TYPE (op0) == type)
7769 if (COMPARISON_CLASS_P (op0))
7771 /* If we have (type) (a CMP b) and type is an integral type, return
7772 new expression involving the new type. Canonicalize
7773 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7775 Do not fold the result as that would not simplify further, also
7776 folding again results in recursions. */
7777 if (TREE_CODE (type) == BOOLEAN_TYPE)
7778 return build2_loc (loc, TREE_CODE (op0), type,
7779 TREE_OPERAND (op0, 0),
7780 TREE_OPERAND (op0, 1));
7781 else if (!INTEGRAL_TYPE_P (type))
7782 return build3_loc (loc, COND_EXPR, type, op0,
7783 constant_boolean_node (true, type),
7784 constant_boolean_node (false, type));
7787 /* Handle cases of two conversions in a row. */
7788 if (CONVERT_EXPR_P (op0))
7790 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7791 tree inter_type = TREE_TYPE (op0);
7792 int inside_int = INTEGRAL_TYPE_P (inside_type);
7793 int inside_ptr = POINTER_TYPE_P (inside_type);
7794 int inside_float = FLOAT_TYPE_P (inside_type);
7795 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7796 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7797 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7798 int inter_int = INTEGRAL_TYPE_P (inter_type);
7799 int inter_ptr = POINTER_TYPE_P (inter_type);
7800 int inter_float = FLOAT_TYPE_P (inter_type);
7801 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7802 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7803 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7804 int final_int = INTEGRAL_TYPE_P (type);
7805 int final_ptr = POINTER_TYPE_P (type);
7806 int final_float = FLOAT_TYPE_P (type);
7807 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7808 unsigned int final_prec = TYPE_PRECISION (type);
7809 int final_unsignedp = TYPE_UNSIGNED (type);
7811 /* In addition to the cases of two conversions in a row
7812 handled below, if we are converting something to its own
7813 type via an object of identical or wider precision, neither
7814 conversion is needed. */
7815 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7816 && (((inter_int || inter_ptr) && final_int)
7817 || (inter_float && final_float))
7818 && inter_prec >= final_prec)
7819 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7821 /* Likewise, if the intermediate and initial types are either both
7822 float or both integer, we don't need the middle conversion if the
7823 former is wider than the latter and doesn't change the signedness
7824 (for integers). Avoid this if the final type is a pointer since
7825 then we sometimes need the middle conversion. Likewise if the
7826 final type has a precision not equal to the size of its mode. */
7827 if (((inter_int && inside_int)
7828 || (inter_float && inside_float)
7829 || (inter_vec && inside_vec))
7830 && inter_prec >= inside_prec
7831 && (inter_float || inter_vec
7832 || inter_unsignedp == inside_unsignedp)
7833 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7834 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7836 && (! final_vec || inter_prec == inside_prec))
7837 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7839 /* If we have a sign-extension of a zero-extended value, we can
7840 replace that by a single zero-extension. */
7841 if (inside_int && inter_int && final_int
7842 && inside_prec < inter_prec && inter_prec < final_prec
7843 && inside_unsignedp && !inter_unsignedp)
7844 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7846 /* Two conversions in a row are not needed unless:
7847 - some conversion is floating-point (overstrict for now), or
7848 - some conversion is a vector (overstrict for now), or
7849 - the intermediate type is narrower than both initial and
7851 - the intermediate type and innermost type differ in signedness,
7852 and the outermost type is wider than the intermediate, or
7853 - the initial type is a pointer type and the precisions of the
7854 intermediate and final types differ, or
7855 - the final type is a pointer type and the precisions of the
7856 initial and intermediate types differ. */
7857 if (! inside_float && ! inter_float && ! final_float
7858 && ! inside_vec && ! inter_vec && ! final_vec
7859 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7860 && ! (inside_int && inter_int
7861 && inter_unsignedp != inside_unsignedp
7862 && inter_prec < final_prec)
7863 && ((inter_unsignedp && inter_prec > inside_prec)
7864 == (final_unsignedp && final_prec > inter_prec))
7865 && ! (inside_ptr && inter_prec != final_prec)
7866 && ! (final_ptr && inside_prec != inter_prec)
7867 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7868 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7869 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7872 /* Handle (T *)&A.B.C for A being of type T and B and C
7873 living at offset zero. This occurs frequently in
7874 C++ upcasting and then accessing the base. */
7875 if (TREE_CODE (op0) == ADDR_EXPR
7876 && POINTER_TYPE_P (type)
7877 && handled_component_p (TREE_OPERAND (op0, 0)))
7879 HOST_WIDE_INT bitsize, bitpos;
7881 enum machine_mode mode;
7882 int unsignedp, volatilep;
7883 tree base = TREE_OPERAND (op0, 0);
7884 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7885 &mode, &unsignedp, &volatilep, false);
7886 /* If the reference was to a (constant) zero offset, we can use
7887 the address of the base if it has the same base type
7888 as the result type and the pointer type is unqualified. */
7889 if (! offset && bitpos == 0
7890 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7891 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7892 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7893 return fold_convert_loc (loc, type,
7894 build_fold_addr_expr_loc (loc, base));
7897 if (TREE_CODE (op0) == MODIFY_EXPR
7898 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7899 /* Detect assigning a bitfield. */
7900 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7902 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7904 /* Don't leave an assignment inside a conversion
7905 unless assigning a bitfield. */
7906 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7907 /* First do the assignment, then return converted constant. */
7908 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7909 TREE_NO_WARNING (tem) = 1;
7910 TREE_USED (tem) = 1;
7914 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7915 constants (if x has signed type, the sign bit cannot be set
7916 in c). This folds extension into the BIT_AND_EXPR.
7917 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7918 very likely don't have maximal range for their precision and this
7919 transformation effectively doesn't preserve non-maximal ranges. */
7920 if (TREE_CODE (type) == INTEGER_TYPE
7921 && TREE_CODE (op0) == BIT_AND_EXPR
7922 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7924 tree and_expr = op0;
7925 tree and0 = TREE_OPERAND (and_expr, 0);
7926 tree and1 = TREE_OPERAND (and_expr, 1);
7929 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7930 || (TYPE_PRECISION (type)
7931 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7933 else if (TYPE_PRECISION (TREE_TYPE (and1))
7934 <= HOST_BITS_PER_WIDE_INT
7935 && host_integerp (and1, 1))
7937 unsigned HOST_WIDE_INT cst;
7939 cst = tree_low_cst (and1, 1);
7940 cst &= (HOST_WIDE_INT) -1
7941 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7942 change = (cst == 0);
7943 #ifdef LOAD_EXTEND_OP
7945 && !flag_syntax_only
7946 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7949 tree uns = unsigned_type_for (TREE_TYPE (and0));
7950 and0 = fold_convert_loc (loc, uns, and0);
7951 and1 = fold_convert_loc (loc, uns, and1);
7957 tem = force_fit_type_double (type, tree_to_double_int (and1),
7958 0, TREE_OVERFLOW (and1));
7959 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7960 fold_convert_loc (loc, type, and0), tem);
7964 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7965 when one of the new casts will fold away. Conservatively we assume
7966 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7967 if (POINTER_TYPE_P (type)
7968 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7969 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7970 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7971 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7972 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7974 tree arg00 = TREE_OPERAND (arg0, 0);
7975 tree arg01 = TREE_OPERAND (arg0, 1);
7977 return fold_build_pointer_plus_loc
7978 (loc, fold_convert_loc (loc, type, arg00), arg01);
7981 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7982 of the same precision, and X is an integer type not narrower than
7983 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7984 if (INTEGRAL_TYPE_P (type)
7985 && TREE_CODE (op0) == BIT_NOT_EXPR
7986 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7987 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7988 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7990 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7991 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7992 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7993 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7994 fold_convert_loc (loc, type, tem));
7997 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7998 type of X and Y (integer types only). */
7999 if (INTEGRAL_TYPE_P (type)
8000 && TREE_CODE (op0) == MULT_EXPR
8001 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8002 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8004 /* Be careful not to introduce new overflows. */
8006 if (TYPE_OVERFLOW_WRAPS (type))
8009 mult_type = unsigned_type_for (type);
8011 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8013 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8014 fold_convert_loc (loc, mult_type,
8015 TREE_OPERAND (op0, 0)),
8016 fold_convert_loc (loc, mult_type,
8017 TREE_OPERAND (op0, 1)));
8018 return fold_convert_loc (loc, type, tem);
8022 tem = fold_convert_const (code, type, op0);
8023 return tem ? tem : NULL_TREE;
8025 case ADDR_SPACE_CONVERT_EXPR:
8026 if (integer_zerop (arg0))
8027 return fold_convert_const (code, type, arg0);
8030 case FIXED_CONVERT_EXPR:
8031 tem = fold_convert_const (code, type, arg0);
8032 return tem ? tem : NULL_TREE;
8034 case VIEW_CONVERT_EXPR:
8035 if (TREE_TYPE (op0) == type)
8037 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8038 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8039 type, TREE_OPERAND (op0, 0));
8040 if (TREE_CODE (op0) == MEM_REF)
8041 return fold_build2_loc (loc, MEM_REF, type,
8042 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8044 /* For integral conversions with the same precision or pointer
8045 conversions use a NOP_EXPR instead. */
8046 if ((INTEGRAL_TYPE_P (type)
8047 || POINTER_TYPE_P (type))
8048 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8049 || POINTER_TYPE_P (TREE_TYPE (op0)))
8050 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8051 return fold_convert_loc (loc, type, op0);
8053 /* Strip inner integral conversions that do not change the precision. */
8054 if (CONVERT_EXPR_P (op0)
8055 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8056 || POINTER_TYPE_P (TREE_TYPE (op0)))
8057 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8058 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8059 && (TYPE_PRECISION (TREE_TYPE (op0))
8060 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8061 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8062 type, TREE_OPERAND (op0, 0));
8064 return fold_view_convert_expr (type, op0);
8067 tem = fold_negate_expr (loc, arg0);
8069 return fold_convert_loc (loc, type, tem);
8073 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8074 return fold_abs_const (arg0, type);
8075 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8076 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8077 /* Convert fabs((double)float) into (double)fabsf(float). */
8078 else if (TREE_CODE (arg0) == NOP_EXPR
8079 && TREE_CODE (type) == REAL_TYPE)
8081 tree targ0 = strip_float_extensions (arg0);
8083 return fold_convert_loc (loc, type,
8084 fold_build1_loc (loc, ABS_EXPR,
8088 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8089 else if (TREE_CODE (arg0) == ABS_EXPR)
8091 else if (tree_expr_nonnegative_p (arg0))
8094 /* Strip sign ops from argument. */
8095 if (TREE_CODE (type) == REAL_TYPE)
8097 tem = fold_strip_sign_ops (arg0);
8099 return fold_build1_loc (loc, ABS_EXPR, type,
8100 fold_convert_loc (loc, type, tem));
8105 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8106 return fold_convert_loc (loc, type, arg0);
8107 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8109 tree itype = TREE_TYPE (type);
8110 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8111 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8112 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8113 negate_expr (ipart));
8115 if (TREE_CODE (arg0) == COMPLEX_CST)
8117 tree itype = TREE_TYPE (type);
8118 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8119 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8120 return build_complex (type, rpart, negate_expr (ipart));
8122 if (TREE_CODE (arg0) == CONJ_EXPR)
8123 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8127 if (TREE_CODE (arg0) == INTEGER_CST)
8128 return fold_not_const (arg0, type);
8129 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8130 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8131 /* Convert ~ (-A) to A - 1. */
8132 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8133 return fold_build2_loc (loc, MINUS_EXPR, type,
8134 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8135 build_int_cst (type, 1));
8136 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8137 else if (INTEGRAL_TYPE_P (type)
8138 && ((TREE_CODE (arg0) == MINUS_EXPR
8139 && integer_onep (TREE_OPERAND (arg0, 1)))
8140 || (TREE_CODE (arg0) == PLUS_EXPR
8141 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8142 return fold_build1_loc (loc, NEGATE_EXPR, type,
8143 fold_convert_loc (loc, type,
8144 TREE_OPERAND (arg0, 0)));
8145 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8146 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8147 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8148 fold_convert_loc (loc, type,
8149 TREE_OPERAND (arg0, 0)))))
8150 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8151 fold_convert_loc (loc, type,
8152 TREE_OPERAND (arg0, 1)));
8153 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8154 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8155 fold_convert_loc (loc, type,
8156 TREE_OPERAND (arg0, 1)))))
8157 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8158 fold_convert_loc (loc, type,
8159 TREE_OPERAND (arg0, 0)), tem);
8160 /* Perform BIT_NOT_EXPR on each element individually. */
8161 else if (TREE_CODE (arg0) == VECTOR_CST)
8163 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8164 int count = TYPE_VECTOR_SUBPARTS (type), i;
8166 for (i = 0; i < count; i++)
8170 elem = TREE_VALUE (elements);
8171 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8172 if (elem == NULL_TREE)
8174 elements = TREE_CHAIN (elements);
8177 elem = build_int_cst (TREE_TYPE (type), -1);
8178 list = tree_cons (NULL_TREE, elem, list);
8181 return build_vector (type, nreverse (list));
8186 case TRUTH_NOT_EXPR:
8187 /* The argument to invert_truthvalue must have Boolean type. */
8188 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8189 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8191 /* Note that the operand of this must be an int
8192 and its values must be 0 or 1.
8193 ("true" is a fixed value perhaps depending on the language,
8194 but we don't handle values other than 1 correctly yet.) */
8195 tem = fold_truth_not_expr (loc, arg0);
8198 return fold_convert_loc (loc, type, tem);
8201 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8202 return fold_convert_loc (loc, type, arg0);
8203 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8204 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8205 TREE_OPERAND (arg0, 1));
8206 if (TREE_CODE (arg0) == COMPLEX_CST)
8207 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8208 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8210 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8211 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8212 fold_build1_loc (loc, REALPART_EXPR, itype,
8213 TREE_OPERAND (arg0, 0)),
8214 fold_build1_loc (loc, REALPART_EXPR, itype,
8215 TREE_OPERAND (arg0, 1)));
8216 return fold_convert_loc (loc, type, tem);
8218 if (TREE_CODE (arg0) == CONJ_EXPR)
8220 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8221 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8222 TREE_OPERAND (arg0, 0));
8223 return fold_convert_loc (loc, type, tem);
8225 if (TREE_CODE (arg0) == CALL_EXPR)
8227 tree fn = get_callee_fndecl (arg0);
8228 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8229 switch (DECL_FUNCTION_CODE (fn))
8231 CASE_FLT_FN (BUILT_IN_CEXPI):
8232 fn = mathfn_built_in (type, BUILT_IN_COS);
8234 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8244 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8245 return build_zero_cst (type);
8246 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8247 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8248 TREE_OPERAND (arg0, 0));
8249 if (TREE_CODE (arg0) == COMPLEX_CST)
8250 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8251 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8253 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8254 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8255 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8256 TREE_OPERAND (arg0, 0)),
8257 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8258 TREE_OPERAND (arg0, 1)));
8259 return fold_convert_loc (loc, type, tem);
8261 if (TREE_CODE (arg0) == CONJ_EXPR)
8263 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8264 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8265 return fold_convert_loc (loc, type, negate_expr (tem));
8267 if (TREE_CODE (arg0) == CALL_EXPR)
8269 tree fn = get_callee_fndecl (arg0);
8270 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8271 switch (DECL_FUNCTION_CODE (fn))
8273 CASE_FLT_FN (BUILT_IN_CEXPI):
8274 fn = mathfn_built_in (type, BUILT_IN_SIN);
8276 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8286 /* Fold *&X to X if X is an lvalue. */
8287 if (TREE_CODE (op0) == ADDR_EXPR)
8289 tree op00 = TREE_OPERAND (op0, 0);
8290 if ((TREE_CODE (op00) == VAR_DECL
8291 || TREE_CODE (op00) == PARM_DECL
8292 || TREE_CODE (op00) == RESULT_DECL)
8293 && !TREE_READONLY (op00))
8300 } /* switch (code) */
8304 /* If the operation was a conversion do _not_ mark a resulting constant
8305 with TREE_OVERFLOW if the original constant was not. These conversions
8306 have implementation defined behavior and retaining the TREE_OVERFLOW
8307 flag here would confuse later passes such as VRP. */
8309 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8310 tree type, tree op0)
8312 tree res = fold_unary_loc (loc, code, type, op0);
8314 && TREE_CODE (res) == INTEGER_CST
8315 && TREE_CODE (op0) == INTEGER_CST
8316 && CONVERT_EXPR_CODE_P (code))
8317 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8322 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8323 operands OP0 and OP1. LOC is the location of the resulting expression.
8324 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8325 Return the folded expression if folding is successful. Otherwise,
8326 return NULL_TREE. */
8328 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8329 tree arg0, tree arg1, tree op0, tree op1)
8333 /* We only do these simplifications if we are optimizing. */
8337 /* Check for things like (A || B) && (A || C). We can convert this
8338 to A || (B && C). Note that either operator can be any of the four
8339 truth and/or operations and the transformation will still be
8340 valid. Also note that we only care about order for the
8341 ANDIF and ORIF operators. If B contains side effects, this
8342 might change the truth-value of A. */
8343 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8344 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8345 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8346 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8347 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8348 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8350 tree a00 = TREE_OPERAND (arg0, 0);
8351 tree a01 = TREE_OPERAND (arg0, 1);
8352 tree a10 = TREE_OPERAND (arg1, 0);
8353 tree a11 = TREE_OPERAND (arg1, 1);
8354 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8355 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8356 && (code == TRUTH_AND_EXPR
8357 || code == TRUTH_OR_EXPR));
8359 if (operand_equal_p (a00, a10, 0))
8360 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8361 fold_build2_loc (loc, code, type, a01, a11));
8362 else if (commutative && operand_equal_p (a00, a11, 0))
8363 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8364 fold_build2_loc (loc, code, type, a01, a10));
8365 else if (commutative && operand_equal_p (a01, a10, 0))
8366 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8367 fold_build2_loc (loc, code, type, a00, a11));
8369 /* This case if tricky because we must either have commutative
8370 operators or else A10 must not have side-effects. */
8372 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8373 && operand_equal_p (a01, a11, 0))
8374 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8375 fold_build2_loc (loc, code, type, a00, a10),
8379 /* See if we can build a range comparison. */
8380 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8383 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8384 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8386 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8388 return fold_build2_loc (loc, code, type, tem, arg1);
8391 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8392 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8394 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8396 return fold_build2_loc (loc, code, type, arg0, tem);
8399 /* Check for the possibility of merging component references. If our
8400 lhs is another similar operation, try to merge its rhs with our
8401 rhs. Then try to merge our lhs and rhs. */
8402 if (TREE_CODE (arg0) == code
8403 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8404 TREE_OPERAND (arg0, 1), arg1)))
8405 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8407 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8410 if ((BRANCH_COST (optimize_function_for_speed_p (cfun),
8412 && LOGICAL_OP_NON_SHORT_CIRCUIT
8413 && (code == TRUTH_AND_EXPR
8414 || code == TRUTH_ANDIF_EXPR
8415 || code == TRUTH_OR_EXPR
8416 || code == TRUTH_ORIF_EXPR))
8418 enum tree_code ncode, icode;
8420 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8421 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8422 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8424 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8425 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8426 We don't want to pack more than two leafs to a non-IF AND/OR
8428 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8429 equal to IF-CODE, then we don't want to add right-hand operand.
8430 If the inner right-hand side of left-hand operand has
8431 side-effects, or isn't simple, then we can't add to it,
8432 as otherwise we might destroy if-sequence. */
8433 if (TREE_CODE (arg0) == icode
8434 && simple_operand_p_2 (arg1)
8435 /* Needed for sequence points to handle trappings, and
8437 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8439 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8441 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8444 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8445 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8446 else if (TREE_CODE (arg1) == icode
8447 && simple_operand_p_2 (arg0)
8448 /* Needed for sequence points to handle trappings, and
8450 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8452 tem = fold_build2_loc (loc, ncode, type,
8453 arg0, TREE_OPERAND (arg1, 0));
8454 return fold_build2_loc (loc, icode, type, tem,
8455 TREE_OPERAND (arg1, 1));
8457 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8459 For sequence point consistancy, we need to check for trapping,
8460 and side-effects. */
8461 else if (code == icode && simple_operand_p_2 (arg0)
8462 && simple_operand_p_2 (arg1))
8463 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8469 /* Fold a binary expression of code CODE and type TYPE with operands
8470 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8471 Return the folded expression if folding is successful. Otherwise,
8472 return NULL_TREE. */
8475 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8477 enum tree_code compl_code;
8479 if (code == MIN_EXPR)
8480 compl_code = MAX_EXPR;
8481 else if (code == MAX_EXPR)
8482 compl_code = MIN_EXPR;
8486 /* MIN (MAX (a, b), b) == b. */
8487 if (TREE_CODE (op0) == compl_code
8488 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8489 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8491 /* MIN (MAX (b, a), b) == b. */
8492 if (TREE_CODE (op0) == compl_code
8493 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8494 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8495 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8497 /* MIN (a, MAX (a, b)) == a. */
8498 if (TREE_CODE (op1) == compl_code
8499 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8500 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8501 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8503 /* MIN (a, MAX (b, a)) == a. */
8504 if (TREE_CODE (op1) == compl_code
8505 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8506 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8507 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8512 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8513 by changing CODE to reduce the magnitude of constants involved in
8514 ARG0 of the comparison.
8515 Returns a canonicalized comparison tree if a simplification was
8516 possible, otherwise returns NULL_TREE.
8517 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8518 valid if signed overflow is undefined. */
8521 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8522 tree arg0, tree arg1,
8523 bool *strict_overflow_p)
8525 enum tree_code code0 = TREE_CODE (arg0);
8526 tree t, cst0 = NULL_TREE;
8530 /* Match A +- CST code arg1 and CST code arg1. We can change the
8531 first form only if overflow is undefined. */
8532 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8533 /* In principle pointers also have undefined overflow behavior,
8534 but that causes problems elsewhere. */
8535 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8536 && (code0 == MINUS_EXPR
8537 || code0 == PLUS_EXPR)
8538 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8539 || code0 == INTEGER_CST))
8542 /* Identify the constant in arg0 and its sign. */
8543 if (code0 == INTEGER_CST)
8546 cst0 = TREE_OPERAND (arg0, 1);
8547 sgn0 = tree_int_cst_sgn (cst0);
8549 /* Overflowed constants and zero will cause problems. */
8550 if (integer_zerop (cst0)
8551 || TREE_OVERFLOW (cst0))
8554 /* See if we can reduce the magnitude of the constant in
8555 arg0 by changing the comparison code. */
8556 if (code0 == INTEGER_CST)
8558 /* CST <= arg1 -> CST-1 < arg1. */
8559 if (code == LE_EXPR && sgn0 == 1)
8561 /* -CST < arg1 -> -CST-1 <= arg1. */
8562 else if (code == LT_EXPR && sgn0 == -1)
8564 /* CST > arg1 -> CST-1 >= arg1. */
8565 else if (code == GT_EXPR && sgn0 == 1)
8567 /* -CST >= arg1 -> -CST-1 > arg1. */
8568 else if (code == GE_EXPR && sgn0 == -1)
8572 /* arg1 code' CST' might be more canonical. */
8577 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8579 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8581 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8582 else if (code == GT_EXPR
8583 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8585 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8586 else if (code == LE_EXPR
8587 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8589 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8590 else if (code == GE_EXPR
8591 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8595 *strict_overflow_p = true;
8598 /* Now build the constant reduced in magnitude. But not if that
8599 would produce one outside of its types range. */
8600 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8602 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8603 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8605 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8606 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8607 /* We cannot swap the comparison here as that would cause us to
8608 endlessly recurse. */
8611 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8612 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8613 if (code0 != INTEGER_CST)
8614 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8615 t = fold_convert (TREE_TYPE (arg1), t);
8617 /* If swapping might yield to a more canonical form, do so. */
8619 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8621 return fold_build2_loc (loc, code, type, t, arg1);
8624 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8625 overflow further. Try to decrease the magnitude of constants involved
8626 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8627 and put sole constants at the second argument position.
8628 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8631 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8632 tree arg0, tree arg1)
8635 bool strict_overflow_p;
8636 const char * const warnmsg = G_("assuming signed overflow does not occur "
8637 "when reducing constant in comparison");
8639 /* Try canonicalization by simplifying arg0. */
8640 strict_overflow_p = false;
8641 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8642 &strict_overflow_p);
8645 if (strict_overflow_p)
8646 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8650 /* Try canonicalization by simplifying arg1 using the swapped
8652 code = swap_tree_comparison (code);
8653 strict_overflow_p = false;
8654 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8655 &strict_overflow_p);
8656 if (t && strict_overflow_p)
8657 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8661 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8662 space. This is used to avoid issuing overflow warnings for
8663 expressions like &p->x which can not wrap. */
8666 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8668 unsigned HOST_WIDE_INT offset_low, total_low;
8669 HOST_WIDE_INT size, offset_high, total_high;
8671 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8677 if (offset == NULL_TREE)
8682 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8686 offset_low = TREE_INT_CST_LOW (offset);
8687 offset_high = TREE_INT_CST_HIGH (offset);
8690 if (add_double_with_sign (offset_low, offset_high,
8691 bitpos / BITS_PER_UNIT, 0,
8692 &total_low, &total_high,
8696 if (total_high != 0)
8699 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8703 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8705 if (TREE_CODE (base) == ADDR_EXPR)
8707 HOST_WIDE_INT base_size;
8709 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8710 if (base_size > 0 && size < base_size)
8714 return total_low > (unsigned HOST_WIDE_INT) size;
8717 /* Subroutine of fold_binary. This routine performs all of the
8718 transformations that are common to the equality/inequality
8719 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8720 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8721 fold_binary should call fold_binary. Fold a comparison with
8722 tree code CODE and type TYPE with operands OP0 and OP1. Return
8723 the folded comparison or NULL_TREE. */
8726 fold_comparison (location_t loc, enum tree_code code, tree type,
8729 tree arg0, arg1, tem;
8734 STRIP_SIGN_NOPS (arg0);
8735 STRIP_SIGN_NOPS (arg1);
8737 tem = fold_relational_const (code, type, arg0, arg1);
8738 if (tem != NULL_TREE)
8741 /* If one arg is a real or integer constant, put it last. */
8742 if (tree_swap_operands_p (arg0, arg1, true))
8743 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8745 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8746 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8747 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8748 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8749 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8750 && (TREE_CODE (arg1) == INTEGER_CST
8751 && !TREE_OVERFLOW (arg1)))
8753 tree const1 = TREE_OPERAND (arg0, 1);
8755 tree variable = TREE_OPERAND (arg0, 0);
8758 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8760 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8761 TREE_TYPE (arg1), const2, const1);
8763 /* If the constant operation overflowed this can be
8764 simplified as a comparison against INT_MAX/INT_MIN. */
8765 if (TREE_CODE (lhs) == INTEGER_CST
8766 && TREE_OVERFLOW (lhs))
8768 int const1_sgn = tree_int_cst_sgn (const1);
8769 enum tree_code code2 = code;
8771 /* Get the sign of the constant on the lhs if the
8772 operation were VARIABLE + CONST1. */
8773 if (TREE_CODE (arg0) == MINUS_EXPR)
8774 const1_sgn = -const1_sgn;
8776 /* The sign of the constant determines if we overflowed
8777 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8778 Canonicalize to the INT_MIN overflow by swapping the comparison
8780 if (const1_sgn == -1)
8781 code2 = swap_tree_comparison (code);
8783 /* We now can look at the canonicalized case
8784 VARIABLE + 1 CODE2 INT_MIN
8785 and decide on the result. */
8786 if (code2 == LT_EXPR
8788 || code2 == EQ_EXPR)
8789 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8790 else if (code2 == NE_EXPR
8792 || code2 == GT_EXPR)
8793 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8796 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8797 && (TREE_CODE (lhs) != INTEGER_CST
8798 || !TREE_OVERFLOW (lhs)))
8800 if (code != EQ_EXPR && code != NE_EXPR)
8801 fold_overflow_warning ("assuming signed overflow does not occur "
8802 "when changing X +- C1 cmp C2 to "
8804 WARN_STRICT_OVERFLOW_COMPARISON);
8805 return fold_build2_loc (loc, code, type, variable, lhs);
8809 /* For comparisons of pointers we can decompose it to a compile time
8810 comparison of the base objects and the offsets into the object.
8811 This requires at least one operand being an ADDR_EXPR or a
8812 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8813 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8814 && (TREE_CODE (arg0) == ADDR_EXPR
8815 || TREE_CODE (arg1) == ADDR_EXPR
8816 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8817 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8819 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8820 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8821 enum machine_mode mode;
8822 int volatilep, unsignedp;
8823 bool indirect_base0 = false, indirect_base1 = false;
8825 /* Get base and offset for the access. Strip ADDR_EXPR for
8826 get_inner_reference, but put it back by stripping INDIRECT_REF
8827 off the base object if possible. indirect_baseN will be true
8828 if baseN is not an address but refers to the object itself. */
8830 if (TREE_CODE (arg0) == ADDR_EXPR)
8832 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8833 &bitsize, &bitpos0, &offset0, &mode,
8834 &unsignedp, &volatilep, false);
8835 if (TREE_CODE (base0) == INDIRECT_REF)
8836 base0 = TREE_OPERAND (base0, 0);
8838 indirect_base0 = true;
8840 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8842 base0 = TREE_OPERAND (arg0, 0);
8843 STRIP_SIGN_NOPS (base0);
8844 if (TREE_CODE (base0) == ADDR_EXPR)
8846 base0 = TREE_OPERAND (base0, 0);
8847 indirect_base0 = true;
8849 offset0 = TREE_OPERAND (arg0, 1);
8853 if (TREE_CODE (arg1) == ADDR_EXPR)
8855 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8856 &bitsize, &bitpos1, &offset1, &mode,
8857 &unsignedp, &volatilep, false);
8858 if (TREE_CODE (base1) == INDIRECT_REF)
8859 base1 = TREE_OPERAND (base1, 0);
8861 indirect_base1 = true;
8863 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8865 base1 = TREE_OPERAND (arg1, 0);
8866 STRIP_SIGN_NOPS (base1);
8867 if (TREE_CODE (base1) == ADDR_EXPR)
8869 base1 = TREE_OPERAND (base1, 0);
8870 indirect_base1 = true;
8872 offset1 = TREE_OPERAND (arg1, 1);
8875 /* A local variable can never be pointed to by
8876 the default SSA name of an incoming parameter. */
8877 if ((TREE_CODE (arg0) == ADDR_EXPR
8879 && TREE_CODE (base0) == VAR_DECL
8880 && auto_var_in_fn_p (base0, current_function_decl)
8882 && TREE_CODE (base1) == SSA_NAME
8883 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8884 && SSA_NAME_IS_DEFAULT_DEF (base1))
8885 || (TREE_CODE (arg1) == ADDR_EXPR
8887 && TREE_CODE (base1) == VAR_DECL
8888 && auto_var_in_fn_p (base1, current_function_decl)
8890 && TREE_CODE (base0) == SSA_NAME
8891 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8892 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8894 if (code == NE_EXPR)
8895 return constant_boolean_node (1, type);
8896 else if (code == EQ_EXPR)
8897 return constant_boolean_node (0, type);
8899 /* If we have equivalent bases we might be able to simplify. */
8900 else if (indirect_base0 == indirect_base1
8901 && operand_equal_p (base0, base1, 0))
8903 /* We can fold this expression to a constant if the non-constant
8904 offset parts are equal. */
8905 if ((offset0 == offset1
8906 || (offset0 && offset1
8907 && operand_equal_p (offset0, offset1, 0)))
8910 || (indirect_base0 && DECL_P (base0))
8911 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8916 && bitpos0 != bitpos1
8917 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8918 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8919 fold_overflow_warning (("assuming pointer wraparound does not "
8920 "occur when comparing P +- C1 with "
8922 WARN_STRICT_OVERFLOW_CONDITIONAL);
8927 return constant_boolean_node (bitpos0 == bitpos1, type);
8929 return constant_boolean_node (bitpos0 != bitpos1, type);
8931 return constant_boolean_node (bitpos0 < bitpos1, type);
8933 return constant_boolean_node (bitpos0 <= bitpos1, type);
8935 return constant_boolean_node (bitpos0 >= bitpos1, type);
8937 return constant_boolean_node (bitpos0 > bitpos1, type);
8941 /* We can simplify the comparison to a comparison of the variable
8942 offset parts if the constant offset parts are equal.
8943 Be careful to use signed size type here because otherwise we
8944 mess with array offsets in the wrong way. This is possible
8945 because pointer arithmetic is restricted to retain within an
8946 object and overflow on pointer differences is undefined as of
8947 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8948 else if (bitpos0 == bitpos1
8949 && ((code == EQ_EXPR || code == NE_EXPR)
8950 || (indirect_base0 && DECL_P (base0))
8951 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8953 /* By converting to signed size type we cover middle-end pointer
8954 arithmetic which operates on unsigned pointer types of size
8955 type size and ARRAY_REF offsets which are properly sign or
8956 zero extended from their type in case it is narrower than
8958 if (offset0 == NULL_TREE)
8959 offset0 = build_int_cst (ssizetype, 0);
8961 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8962 if (offset1 == NULL_TREE)
8963 offset1 = build_int_cst (ssizetype, 0);
8965 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8969 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8970 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8971 fold_overflow_warning (("assuming pointer wraparound does not "
8972 "occur when comparing P +- C1 with "
8974 WARN_STRICT_OVERFLOW_COMPARISON);
8976 return fold_build2_loc (loc, code, type, offset0, offset1);
8979 /* For non-equal bases we can simplify if they are addresses
8980 of local binding decls or constants. */
8981 else if (indirect_base0 && indirect_base1
8982 /* We know that !operand_equal_p (base0, base1, 0)
8983 because the if condition was false. But make
8984 sure two decls are not the same. */
8986 && TREE_CODE (arg0) == ADDR_EXPR
8987 && TREE_CODE (arg1) == ADDR_EXPR
8988 && (((TREE_CODE (base0) == VAR_DECL
8989 || TREE_CODE (base0) == PARM_DECL)
8990 && (targetm.binds_local_p (base0)
8991 || CONSTANT_CLASS_P (base1)))
8992 || CONSTANT_CLASS_P (base0))
8993 && (((TREE_CODE (base1) == VAR_DECL
8994 || TREE_CODE (base1) == PARM_DECL)
8995 && (targetm.binds_local_p (base1)
8996 || CONSTANT_CLASS_P (base0)))
8997 || CONSTANT_CLASS_P (base1)))
8999 if (code == EQ_EXPR)
9000 return omit_two_operands_loc (loc, type, boolean_false_node,
9002 else if (code == NE_EXPR)
9003 return omit_two_operands_loc (loc, type, boolean_true_node,
9006 /* For equal offsets we can simplify to a comparison of the
9008 else if (bitpos0 == bitpos1
9010 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9012 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9013 && ((offset0 == offset1)
9014 || (offset0 && offset1
9015 && operand_equal_p (offset0, offset1, 0))))
9018 base0 = build_fold_addr_expr_loc (loc, base0);
9020 base1 = build_fold_addr_expr_loc (loc, base1);
9021 return fold_build2_loc (loc, code, type, base0, base1);
9025 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9026 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9027 the resulting offset is smaller in absolute value than the
9029 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9030 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9031 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9032 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9033 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9034 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9035 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9037 tree const1 = TREE_OPERAND (arg0, 1);
9038 tree const2 = TREE_OPERAND (arg1, 1);
9039 tree variable1 = TREE_OPERAND (arg0, 0);
9040 tree variable2 = TREE_OPERAND (arg1, 0);
9042 const char * const warnmsg = G_("assuming signed overflow does not "
9043 "occur when combining constants around "
9046 /* Put the constant on the side where it doesn't overflow and is
9047 of lower absolute value than before. */
9048 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9049 ? MINUS_EXPR : PLUS_EXPR,
9051 if (!TREE_OVERFLOW (cst)
9052 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9054 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9055 return fold_build2_loc (loc, code, type,
9057 fold_build2_loc (loc,
9058 TREE_CODE (arg1), TREE_TYPE (arg1),
9062 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9063 ? MINUS_EXPR : PLUS_EXPR,
9065 if (!TREE_OVERFLOW (cst)
9066 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9068 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9069 return fold_build2_loc (loc, code, type,
9070 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9076 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9077 signed arithmetic case. That form is created by the compiler
9078 often enough for folding it to be of value. One example is in
9079 computing loop trip counts after Operator Strength Reduction. */
9080 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9081 && TREE_CODE (arg0) == MULT_EXPR
9082 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9083 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9084 && integer_zerop (arg1))
9086 tree const1 = TREE_OPERAND (arg0, 1);
9087 tree const2 = arg1; /* zero */
9088 tree variable1 = TREE_OPERAND (arg0, 0);
9089 enum tree_code cmp_code = code;
9091 /* Handle unfolded multiplication by zero. */
9092 if (integer_zerop (const1))
9093 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9095 fold_overflow_warning (("assuming signed overflow does not occur when "
9096 "eliminating multiplication in comparison "
9098 WARN_STRICT_OVERFLOW_COMPARISON);
9100 /* If const1 is negative we swap the sense of the comparison. */
9101 if (tree_int_cst_sgn (const1) < 0)
9102 cmp_code = swap_tree_comparison (cmp_code);
9104 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9107 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9111 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9113 tree targ0 = strip_float_extensions (arg0);
9114 tree targ1 = strip_float_extensions (arg1);
9115 tree newtype = TREE_TYPE (targ0);
9117 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9118 newtype = TREE_TYPE (targ1);
9120 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9121 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9122 return fold_build2_loc (loc, code, type,
9123 fold_convert_loc (loc, newtype, targ0),
9124 fold_convert_loc (loc, newtype, targ1));
9126 /* (-a) CMP (-b) -> b CMP a */
9127 if (TREE_CODE (arg0) == NEGATE_EXPR
9128 && TREE_CODE (arg1) == NEGATE_EXPR)
9129 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9130 TREE_OPERAND (arg0, 0));
9132 if (TREE_CODE (arg1) == REAL_CST)
9134 REAL_VALUE_TYPE cst;
9135 cst = TREE_REAL_CST (arg1);
9137 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9138 if (TREE_CODE (arg0) == NEGATE_EXPR)
9139 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9140 TREE_OPERAND (arg0, 0),
9141 build_real (TREE_TYPE (arg1),
9142 real_value_negate (&cst)));
9144 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9145 /* a CMP (-0) -> a CMP 0 */
9146 if (REAL_VALUE_MINUS_ZERO (cst))
9147 return fold_build2_loc (loc, code, type, arg0,
9148 build_real (TREE_TYPE (arg1), dconst0));
9150 /* x != NaN is always true, other ops are always false. */
9151 if (REAL_VALUE_ISNAN (cst)
9152 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9154 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9155 return omit_one_operand_loc (loc, type, tem, arg0);
9158 /* Fold comparisons against infinity. */
9159 if (REAL_VALUE_ISINF (cst)
9160 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9162 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9163 if (tem != NULL_TREE)
9168 /* If this is a comparison of a real constant with a PLUS_EXPR
9169 or a MINUS_EXPR of a real constant, we can convert it into a
9170 comparison with a revised real constant as long as no overflow
9171 occurs when unsafe_math_optimizations are enabled. */
9172 if (flag_unsafe_math_optimizations
9173 && TREE_CODE (arg1) == REAL_CST
9174 && (TREE_CODE (arg0) == PLUS_EXPR
9175 || TREE_CODE (arg0) == MINUS_EXPR)
9176 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9177 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9178 ? MINUS_EXPR : PLUS_EXPR,
9179 arg1, TREE_OPERAND (arg0, 1)))
9180 && !TREE_OVERFLOW (tem))
9181 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9183 /* Likewise, we can simplify a comparison of a real constant with
9184 a MINUS_EXPR whose first operand is also a real constant, i.e.
9185 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9186 floating-point types only if -fassociative-math is set. */
9187 if (flag_associative_math
9188 && TREE_CODE (arg1) == REAL_CST
9189 && TREE_CODE (arg0) == MINUS_EXPR
9190 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9191 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9193 && !TREE_OVERFLOW (tem))
9194 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9195 TREE_OPERAND (arg0, 1), tem);
9197 /* Fold comparisons against built-in math functions. */
9198 if (TREE_CODE (arg1) == REAL_CST
9199 && flag_unsafe_math_optimizations
9200 && ! flag_errno_math)
9202 enum built_in_function fcode = builtin_mathfn_code (arg0);
9204 if (fcode != END_BUILTINS)
9206 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9207 if (tem != NULL_TREE)
9213 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9214 && CONVERT_EXPR_P (arg0))
9216 /* If we are widening one operand of an integer comparison,
9217 see if the other operand is similarly being widened. Perhaps we
9218 can do the comparison in the narrower type. */
9219 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9223 /* Or if we are changing signedness. */
9224 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9229 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9230 constant, we can simplify it. */
9231 if (TREE_CODE (arg1) == INTEGER_CST
9232 && (TREE_CODE (arg0) == MIN_EXPR
9233 || TREE_CODE (arg0) == MAX_EXPR)
9234 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9236 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9241 /* Simplify comparison of something with itself. (For IEEE
9242 floating-point, we can only do some of these simplifications.) */
9243 if (operand_equal_p (arg0, arg1, 0))
9248 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9249 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9250 return constant_boolean_node (1, type);
9255 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9256 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9257 return constant_boolean_node (1, type);
9258 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9261 /* For NE, we can only do this simplification if integer
9262 or we don't honor IEEE floating point NaNs. */
9263 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9264 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9266 /* ... fall through ... */
9269 return constant_boolean_node (0, type);
9275 /* If we are comparing an expression that just has comparisons
9276 of two integer values, arithmetic expressions of those comparisons,
9277 and constants, we can simplify it. There are only three cases
9278 to check: the two values can either be equal, the first can be
9279 greater, or the second can be greater. Fold the expression for
9280 those three values. Since each value must be 0 or 1, we have
9281 eight possibilities, each of which corresponds to the constant 0
9282 or 1 or one of the six possible comparisons.
9284 This handles common cases like (a > b) == 0 but also handles
9285 expressions like ((x > y) - (y > x)) > 0, which supposedly
9286 occur in macroized code. */
9288 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9290 tree cval1 = 0, cval2 = 0;
9293 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9294 /* Don't handle degenerate cases here; they should already
9295 have been handled anyway. */
9296 && cval1 != 0 && cval2 != 0
9297 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9298 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9299 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9300 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9301 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9302 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9303 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9305 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9306 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9308 /* We can't just pass T to eval_subst in case cval1 or cval2
9309 was the same as ARG1. */
9312 = fold_build2_loc (loc, code, type,
9313 eval_subst (loc, arg0, cval1, maxval,
9317 = fold_build2_loc (loc, code, type,
9318 eval_subst (loc, arg0, cval1, maxval,
9322 = fold_build2_loc (loc, code, type,
9323 eval_subst (loc, arg0, cval1, minval,
9327 /* All three of these results should be 0 or 1. Confirm they are.
9328 Then use those values to select the proper code to use. */
9330 if (TREE_CODE (high_result) == INTEGER_CST
9331 && TREE_CODE (equal_result) == INTEGER_CST
9332 && TREE_CODE (low_result) == INTEGER_CST)
9334 /* Make a 3-bit mask with the high-order bit being the
9335 value for `>', the next for '=', and the low for '<'. */
9336 switch ((integer_onep (high_result) * 4)
9337 + (integer_onep (equal_result) * 2)
9338 + integer_onep (low_result))
9342 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9363 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9368 tem = save_expr (build2 (code, type, cval1, cval2));
9369 SET_EXPR_LOCATION (tem, loc);
9372 return fold_build2_loc (loc, code, type, cval1, cval2);
9377 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9378 into a single range test. */
9379 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9380 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9381 && TREE_CODE (arg1) == INTEGER_CST
9382 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9383 && !integer_zerop (TREE_OPERAND (arg0, 1))
9384 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9385 && !TREE_OVERFLOW (arg1))
9387 tem = fold_div_compare (loc, code, type, arg0, arg1);
9388 if (tem != NULL_TREE)
9392 /* Fold ~X op ~Y as Y op X. */
9393 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9394 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9396 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9397 return fold_build2_loc (loc, code, type,
9398 fold_convert_loc (loc, cmp_type,
9399 TREE_OPERAND (arg1, 0)),
9400 TREE_OPERAND (arg0, 0));
9403 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9404 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9405 && TREE_CODE (arg1) == INTEGER_CST)
9407 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9408 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9409 TREE_OPERAND (arg0, 0),
9410 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9411 fold_convert_loc (loc, cmp_type, arg1)));
9418 /* Subroutine of fold_binary. Optimize complex multiplications of the
9419 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9420 argument EXPR represents the expression "z" of type TYPE. */
9423 fold_mult_zconjz (location_t loc, tree type, tree expr)
9425 tree itype = TREE_TYPE (type);
9426 tree rpart, ipart, tem;
9428 if (TREE_CODE (expr) == COMPLEX_EXPR)
9430 rpart = TREE_OPERAND (expr, 0);
9431 ipart = TREE_OPERAND (expr, 1);
9433 else if (TREE_CODE (expr) == COMPLEX_CST)
9435 rpart = TREE_REALPART (expr);
9436 ipart = TREE_IMAGPART (expr);
9440 expr = save_expr (expr);
9441 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9442 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9445 rpart = save_expr (rpart);
9446 ipart = save_expr (ipart);
9447 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9448 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9449 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9450 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9451 build_zero_cst (itype));
9455 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9456 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9457 guarantees that P and N have the same least significant log2(M) bits.
9458 N is not otherwise constrained. In particular, N is not normalized to
9459 0 <= N < M as is common. In general, the precise value of P is unknown.
9460 M is chosen as large as possible such that constant N can be determined.
9462 Returns M and sets *RESIDUE to N.
9464 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9465 account. This is not always possible due to PR 35705.
9468 static unsigned HOST_WIDE_INT
9469 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9470 bool allow_func_align)
9472 enum tree_code code;
9476 code = TREE_CODE (expr);
9477 if (code == ADDR_EXPR)
9479 unsigned int bitalign;
9480 bitalign = get_object_alignment_1 (TREE_OPERAND (expr, 0), residue);
9481 *residue /= BITS_PER_UNIT;
9482 return bitalign / BITS_PER_UNIT;
9484 else if (code == POINTER_PLUS_EXPR)
9487 unsigned HOST_WIDE_INT modulus;
9488 enum tree_code inner_code;
9490 op0 = TREE_OPERAND (expr, 0);
9492 modulus = get_pointer_modulus_and_residue (op0, residue,
9495 op1 = TREE_OPERAND (expr, 1);
9497 inner_code = TREE_CODE (op1);
9498 if (inner_code == INTEGER_CST)
9500 *residue += TREE_INT_CST_LOW (op1);
9503 else if (inner_code == MULT_EXPR)
9505 op1 = TREE_OPERAND (op1, 1);
9506 if (TREE_CODE (op1) == INTEGER_CST)
9508 unsigned HOST_WIDE_INT align;
9510 /* Compute the greatest power-of-2 divisor of op1. */
9511 align = TREE_INT_CST_LOW (op1);
9514 /* If align is non-zero and less than *modulus, replace
9515 *modulus with align., If align is 0, then either op1 is 0
9516 or the greatest power-of-2 divisor of op1 doesn't fit in an
9517 unsigned HOST_WIDE_INT. In either case, no additional
9518 constraint is imposed. */
9520 modulus = MIN (modulus, align);
9527 /* If we get here, we were unable to determine anything useful about the
9533 /* Fold a binary expression of code CODE and type TYPE with operands
9534 OP0 and OP1. LOC is the location of the resulting expression.
9535 Return the folded expression if folding is successful. Otherwise,
9536 return NULL_TREE. */
9539 fold_binary_loc (location_t loc,
9540 enum tree_code code, tree type, tree op0, tree op1)
9542 enum tree_code_class kind = TREE_CODE_CLASS (code);
9543 tree arg0, arg1, tem;
9544 tree t1 = NULL_TREE;
9545 bool strict_overflow_p;
9547 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9548 && TREE_CODE_LENGTH (code) == 2
9550 && op1 != NULL_TREE);
9555 /* Strip any conversions that don't change the mode. This is
9556 safe for every expression, except for a comparison expression
9557 because its signedness is derived from its operands. So, in
9558 the latter case, only strip conversions that don't change the
9559 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9562 Note that this is done as an internal manipulation within the
9563 constant folder, in order to find the simplest representation
9564 of the arguments so that their form can be studied. In any
9565 cases, the appropriate type conversions should be put back in
9566 the tree that will get out of the constant folder. */
9568 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9570 STRIP_SIGN_NOPS (arg0);
9571 STRIP_SIGN_NOPS (arg1);
9579 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9580 constant but we can't do arithmetic on them. */
9581 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9582 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9583 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9584 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9585 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9586 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9588 if (kind == tcc_binary)
9590 /* Make sure type and arg0 have the same saturating flag. */
9591 gcc_assert (TYPE_SATURATING (type)
9592 == TYPE_SATURATING (TREE_TYPE (arg0)));
9593 tem = const_binop (code, arg0, arg1);
9595 else if (kind == tcc_comparison)
9596 tem = fold_relational_const (code, type, arg0, arg1);
9600 if (tem != NULL_TREE)
9602 if (TREE_TYPE (tem) != type)
9603 tem = fold_convert_loc (loc, type, tem);
9608 /* If this is a commutative operation, and ARG0 is a constant, move it
9609 to ARG1 to reduce the number of tests below. */
9610 if (commutative_tree_code (code)
9611 && tree_swap_operands_p (arg0, arg1, true))
9612 return fold_build2_loc (loc, code, type, op1, op0);
9614 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9616 First check for cases where an arithmetic operation is applied to a
9617 compound, conditional, or comparison operation. Push the arithmetic
9618 operation inside the compound or conditional to see if any folding
9619 can then be done. Convert comparison to conditional for this purpose.
9620 The also optimizes non-constant cases that used to be done in
9623 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9624 one of the operands is a comparison and the other is a comparison, a
9625 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9626 code below would make the expression more complex. Change it to a
9627 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9628 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9630 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9631 || code == EQ_EXPR || code == NE_EXPR)
9632 && ((truth_value_p (TREE_CODE (arg0))
9633 && (truth_value_p (TREE_CODE (arg1))
9634 || (TREE_CODE (arg1) == BIT_AND_EXPR
9635 && integer_onep (TREE_OPERAND (arg1, 1)))))
9636 || (truth_value_p (TREE_CODE (arg1))
9637 && (truth_value_p (TREE_CODE (arg0))
9638 || (TREE_CODE (arg0) == BIT_AND_EXPR
9639 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9641 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9642 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9645 fold_convert_loc (loc, boolean_type_node, arg0),
9646 fold_convert_loc (loc, boolean_type_node, arg1));
9648 if (code == EQ_EXPR)
9649 tem = invert_truthvalue_loc (loc, tem);
9651 return fold_convert_loc (loc, type, tem);
9654 if (TREE_CODE_CLASS (code) == tcc_binary
9655 || TREE_CODE_CLASS (code) == tcc_comparison)
9657 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9659 tem = fold_build2_loc (loc, code, type,
9660 fold_convert_loc (loc, TREE_TYPE (op0),
9661 TREE_OPERAND (arg0, 1)), op1);
9662 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9665 if (TREE_CODE (arg1) == COMPOUND_EXPR
9666 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9668 tem = fold_build2_loc (loc, code, type, op0,
9669 fold_convert_loc (loc, TREE_TYPE (op1),
9670 TREE_OPERAND (arg1, 1)));
9671 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9675 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9677 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9679 /*cond_first_p=*/1);
9680 if (tem != NULL_TREE)
9684 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9686 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9688 /*cond_first_p=*/0);
9689 if (tem != NULL_TREE)
9697 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9698 if (TREE_CODE (arg0) == ADDR_EXPR
9699 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9701 tree iref = TREE_OPERAND (arg0, 0);
9702 return fold_build2 (MEM_REF, type,
9703 TREE_OPERAND (iref, 0),
9704 int_const_binop (PLUS_EXPR, arg1,
9705 TREE_OPERAND (iref, 1)));
9708 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9709 if (TREE_CODE (arg0) == ADDR_EXPR
9710 && handled_component_p (TREE_OPERAND (arg0, 0)))
9713 HOST_WIDE_INT coffset;
9714 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9718 return fold_build2 (MEM_REF, type,
9719 build_fold_addr_expr (base),
9720 int_const_binop (PLUS_EXPR, arg1,
9721 size_int (coffset)));
9726 case POINTER_PLUS_EXPR:
9727 /* 0 +p index -> (type)index */
9728 if (integer_zerop (arg0))
9729 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9731 /* PTR +p 0 -> PTR */
9732 if (integer_zerop (arg1))
9733 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9735 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9736 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9737 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9738 return fold_convert_loc (loc, type,
9739 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9740 fold_convert_loc (loc, sizetype,
9742 fold_convert_loc (loc, sizetype,
9745 /* (PTR +p B) +p A -> PTR +p (B + A) */
9746 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9749 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9750 tree arg00 = TREE_OPERAND (arg0, 0);
9751 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9752 arg01, fold_convert_loc (loc, sizetype, arg1));
9753 return fold_convert_loc (loc, type,
9754 fold_build_pointer_plus_loc (loc,
9758 /* PTR_CST +p CST -> CST1 */
9759 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9760 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9761 fold_convert_loc (loc, type, arg1));
9763 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9764 of the array. Loop optimizer sometimes produce this type of
9766 if (TREE_CODE (arg0) == ADDR_EXPR)
9768 tem = try_move_mult_to_index (loc, arg0,
9769 fold_convert_loc (loc, sizetype, arg1));
9771 return fold_convert_loc (loc, type, tem);
9777 /* A + (-B) -> A - B */
9778 if (TREE_CODE (arg1) == NEGATE_EXPR)
9779 return fold_build2_loc (loc, MINUS_EXPR, type,
9780 fold_convert_loc (loc, type, arg0),
9781 fold_convert_loc (loc, type,
9782 TREE_OPERAND (arg1, 0)));
9783 /* (-A) + B -> B - A */
9784 if (TREE_CODE (arg0) == NEGATE_EXPR
9785 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9786 return fold_build2_loc (loc, MINUS_EXPR, type,
9787 fold_convert_loc (loc, type, arg1),
9788 fold_convert_loc (loc, type,
9789 TREE_OPERAND (arg0, 0)));
9791 if (INTEGRAL_TYPE_P (type))
9793 /* Convert ~A + 1 to -A. */
9794 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9795 && integer_onep (arg1))
9796 return fold_build1_loc (loc, NEGATE_EXPR, type,
9797 fold_convert_loc (loc, type,
9798 TREE_OPERAND (arg0, 0)));
9801 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9802 && !TYPE_OVERFLOW_TRAPS (type))
9804 tree tem = TREE_OPERAND (arg0, 0);
9807 if (operand_equal_p (tem, arg1, 0))
9809 t1 = build_int_cst_type (type, -1);
9810 return omit_one_operand_loc (loc, type, t1, arg1);
9815 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9816 && !TYPE_OVERFLOW_TRAPS (type))
9818 tree tem = TREE_OPERAND (arg1, 0);
9821 if (operand_equal_p (arg0, tem, 0))
9823 t1 = build_int_cst_type (type, -1);
9824 return omit_one_operand_loc (loc, type, t1, arg0);
9828 /* X + (X / CST) * -CST is X % CST. */
9829 if (TREE_CODE (arg1) == MULT_EXPR
9830 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9831 && operand_equal_p (arg0,
9832 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9834 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9835 tree cst1 = TREE_OPERAND (arg1, 1);
9836 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9838 if (sum && integer_zerop (sum))
9839 return fold_convert_loc (loc, type,
9840 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9841 TREE_TYPE (arg0), arg0,
9846 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9847 same or one. Make sure type is not saturating.
9848 fold_plusminus_mult_expr will re-associate. */
9849 if ((TREE_CODE (arg0) == MULT_EXPR
9850 || TREE_CODE (arg1) == MULT_EXPR)
9851 && !TYPE_SATURATING (type)
9852 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9854 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9859 if (! FLOAT_TYPE_P (type))
9861 if (integer_zerop (arg1))
9862 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9864 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9865 with a constant, and the two constants have no bits in common,
9866 we should treat this as a BIT_IOR_EXPR since this may produce more
9868 if (TREE_CODE (arg0) == BIT_AND_EXPR
9869 && TREE_CODE (arg1) == BIT_AND_EXPR
9870 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9871 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9872 && integer_zerop (const_binop (BIT_AND_EXPR,
9873 TREE_OPERAND (arg0, 1),
9874 TREE_OPERAND (arg1, 1))))
9876 code = BIT_IOR_EXPR;
9880 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9881 (plus (plus (mult) (mult)) (foo)) so that we can
9882 take advantage of the factoring cases below. */
9883 if (TYPE_OVERFLOW_WRAPS (type)
9884 && (((TREE_CODE (arg0) == PLUS_EXPR
9885 || TREE_CODE (arg0) == MINUS_EXPR)
9886 && TREE_CODE (arg1) == MULT_EXPR)
9887 || ((TREE_CODE (arg1) == PLUS_EXPR
9888 || TREE_CODE (arg1) == MINUS_EXPR)
9889 && TREE_CODE (arg0) == MULT_EXPR)))
9891 tree parg0, parg1, parg, marg;
9892 enum tree_code pcode;
9894 if (TREE_CODE (arg1) == MULT_EXPR)
9895 parg = arg0, marg = arg1;
9897 parg = arg1, marg = arg0;
9898 pcode = TREE_CODE (parg);
9899 parg0 = TREE_OPERAND (parg, 0);
9900 parg1 = TREE_OPERAND (parg, 1);
9904 if (TREE_CODE (parg0) == MULT_EXPR
9905 && TREE_CODE (parg1) != MULT_EXPR)
9906 return fold_build2_loc (loc, pcode, type,
9907 fold_build2_loc (loc, PLUS_EXPR, type,
9908 fold_convert_loc (loc, type,
9910 fold_convert_loc (loc, type,
9912 fold_convert_loc (loc, type, parg1));
9913 if (TREE_CODE (parg0) != MULT_EXPR
9914 && TREE_CODE (parg1) == MULT_EXPR)
9916 fold_build2_loc (loc, PLUS_EXPR, type,
9917 fold_convert_loc (loc, type, parg0),
9918 fold_build2_loc (loc, pcode, type,
9919 fold_convert_loc (loc, type, marg),
9920 fold_convert_loc (loc, type,
9926 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9927 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9928 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9930 /* Likewise if the operands are reversed. */
9931 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9932 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9934 /* Convert X + -C into X - C. */
9935 if (TREE_CODE (arg1) == REAL_CST
9936 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9938 tem = fold_negate_const (arg1, type);
9939 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9940 return fold_build2_loc (loc, MINUS_EXPR, type,
9941 fold_convert_loc (loc, type, arg0),
9942 fold_convert_loc (loc, type, tem));
9945 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9946 to __complex__ ( x, y ). This is not the same for SNaNs or
9947 if signed zeros are involved. */
9948 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9949 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9950 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9952 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9953 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9954 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9955 bool arg0rz = false, arg0iz = false;
9956 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9957 || (arg0i && (arg0iz = real_zerop (arg0i))))
9959 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9960 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9961 if (arg0rz && arg1i && real_zerop (arg1i))
9963 tree rp = arg1r ? arg1r
9964 : build1 (REALPART_EXPR, rtype, arg1);
9965 tree ip = arg0i ? arg0i
9966 : build1 (IMAGPART_EXPR, rtype, arg0);
9967 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9969 else if (arg0iz && arg1r && real_zerop (arg1r))
9971 tree rp = arg0r ? arg0r
9972 : build1 (REALPART_EXPR, rtype, arg0);
9973 tree ip = arg1i ? arg1i
9974 : build1 (IMAGPART_EXPR, rtype, arg1);
9975 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9980 if (flag_unsafe_math_optimizations
9981 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9982 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9983 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9986 /* Convert x+x into x*2.0. */
9987 if (operand_equal_p (arg0, arg1, 0)
9988 && SCALAR_FLOAT_TYPE_P (type))
9989 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9990 build_real (type, dconst2));
9992 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9993 We associate floats only if the user has specified
9994 -fassociative-math. */
9995 if (flag_associative_math
9996 && TREE_CODE (arg1) == PLUS_EXPR
9997 && TREE_CODE (arg0) != MULT_EXPR)
9999 tree tree10 = TREE_OPERAND (arg1, 0);
10000 tree tree11 = TREE_OPERAND (arg1, 1);
10001 if (TREE_CODE (tree11) == MULT_EXPR
10002 && TREE_CODE (tree10) == MULT_EXPR)
10005 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10006 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10009 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10010 We associate floats only if the user has specified
10011 -fassociative-math. */
10012 if (flag_associative_math
10013 && TREE_CODE (arg0) == PLUS_EXPR
10014 && TREE_CODE (arg1) != MULT_EXPR)
10016 tree tree00 = TREE_OPERAND (arg0, 0);
10017 tree tree01 = TREE_OPERAND (arg0, 1);
10018 if (TREE_CODE (tree01) == MULT_EXPR
10019 && TREE_CODE (tree00) == MULT_EXPR)
10022 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10023 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10029 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10030 is a rotate of A by C1 bits. */
10031 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10032 is a rotate of A by B bits. */
10034 enum tree_code code0, code1;
10036 code0 = TREE_CODE (arg0);
10037 code1 = TREE_CODE (arg1);
10038 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10039 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10040 && operand_equal_p (TREE_OPERAND (arg0, 0),
10041 TREE_OPERAND (arg1, 0), 0)
10042 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10043 TYPE_UNSIGNED (rtype))
10044 /* Only create rotates in complete modes. Other cases are not
10045 expanded properly. */
10046 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10048 tree tree01, tree11;
10049 enum tree_code code01, code11;
10051 tree01 = TREE_OPERAND (arg0, 1);
10052 tree11 = TREE_OPERAND (arg1, 1);
10053 STRIP_NOPS (tree01);
10054 STRIP_NOPS (tree11);
10055 code01 = TREE_CODE (tree01);
10056 code11 = TREE_CODE (tree11);
10057 if (code01 == INTEGER_CST
10058 && code11 == INTEGER_CST
10059 && TREE_INT_CST_HIGH (tree01) == 0
10060 && TREE_INT_CST_HIGH (tree11) == 0
10061 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10062 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10064 tem = build2_loc (loc, LROTATE_EXPR,
10065 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10066 TREE_OPERAND (arg0, 0),
10067 code0 == LSHIFT_EXPR ? tree01 : tree11);
10068 return fold_convert_loc (loc, type, tem);
10070 else if (code11 == MINUS_EXPR)
10072 tree tree110, tree111;
10073 tree110 = TREE_OPERAND (tree11, 0);
10074 tree111 = TREE_OPERAND (tree11, 1);
10075 STRIP_NOPS (tree110);
10076 STRIP_NOPS (tree111);
10077 if (TREE_CODE (tree110) == INTEGER_CST
10078 && 0 == compare_tree_int (tree110,
10080 (TREE_TYPE (TREE_OPERAND
10082 && operand_equal_p (tree01, tree111, 0))
10084 fold_convert_loc (loc, type,
10085 build2 ((code0 == LSHIFT_EXPR
10088 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10089 TREE_OPERAND (arg0, 0), tree01));
10091 else if (code01 == MINUS_EXPR)
10093 tree tree010, tree011;
10094 tree010 = TREE_OPERAND (tree01, 0);
10095 tree011 = TREE_OPERAND (tree01, 1);
10096 STRIP_NOPS (tree010);
10097 STRIP_NOPS (tree011);
10098 if (TREE_CODE (tree010) == INTEGER_CST
10099 && 0 == compare_tree_int (tree010,
10101 (TREE_TYPE (TREE_OPERAND
10103 && operand_equal_p (tree11, tree011, 0))
10104 return fold_convert_loc
10106 build2 ((code0 != LSHIFT_EXPR
10109 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10110 TREE_OPERAND (arg0, 0), tree11));
10116 /* In most languages, can't associate operations on floats through
10117 parentheses. Rather than remember where the parentheses were, we
10118 don't associate floats at all, unless the user has specified
10119 -fassociative-math.
10120 And, we need to make sure type is not saturating. */
10122 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10123 && !TYPE_SATURATING (type))
10125 tree var0, con0, lit0, minus_lit0;
10126 tree var1, con1, lit1, minus_lit1;
10129 /* Split both trees into variables, constants, and literals. Then
10130 associate each group together, the constants with literals,
10131 then the result with variables. This increases the chances of
10132 literals being recombined later and of generating relocatable
10133 expressions for the sum of a constant and literal. */
10134 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10135 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10136 code == MINUS_EXPR);
10138 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10139 if (code == MINUS_EXPR)
10142 /* With undefined overflow we can only associate constants with one
10143 variable, and constants whose association doesn't overflow. */
10144 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10145 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10152 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10153 tmp0 = TREE_OPERAND (tmp0, 0);
10154 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10155 tmp1 = TREE_OPERAND (tmp1, 0);
10156 /* The only case we can still associate with two variables
10157 is if they are the same, modulo negation. */
10158 if (!operand_equal_p (tmp0, tmp1, 0))
10162 if (ok && lit0 && lit1)
10164 tree tmp0 = fold_convert (type, lit0);
10165 tree tmp1 = fold_convert (type, lit1);
10167 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10168 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10173 /* Only do something if we found more than two objects. Otherwise,
10174 nothing has changed and we risk infinite recursion. */
10176 && (2 < ((var0 != 0) + (var1 != 0)
10177 + (con0 != 0) + (con1 != 0)
10178 + (lit0 != 0) + (lit1 != 0)
10179 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10181 var0 = associate_trees (loc, var0, var1, code, type);
10182 con0 = associate_trees (loc, con0, con1, code, type);
10183 lit0 = associate_trees (loc, lit0, lit1, code, type);
10184 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10186 /* Preserve the MINUS_EXPR if the negative part of the literal is
10187 greater than the positive part. Otherwise, the multiplicative
10188 folding code (i.e extract_muldiv) may be fooled in case
10189 unsigned constants are subtracted, like in the following
10190 example: ((X*2 + 4) - 8U)/2. */
10191 if (minus_lit0 && lit0)
10193 if (TREE_CODE (lit0) == INTEGER_CST
10194 && TREE_CODE (minus_lit0) == INTEGER_CST
10195 && tree_int_cst_lt (lit0, minus_lit0))
10197 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10203 lit0 = associate_trees (loc, lit0, minus_lit0,
10212 fold_convert_loc (loc, type,
10213 associate_trees (loc, var0, minus_lit0,
10214 MINUS_EXPR, type));
10217 con0 = associate_trees (loc, con0, minus_lit0,
10220 fold_convert_loc (loc, type,
10221 associate_trees (loc, var0, con0,
10226 con0 = associate_trees (loc, con0, lit0, code, type);
10228 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10236 /* Pointer simplifications for subtraction, simple reassociations. */
10237 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10239 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10240 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10241 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10243 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10244 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10245 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10246 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10247 return fold_build2_loc (loc, PLUS_EXPR, type,
10248 fold_build2_loc (loc, MINUS_EXPR, type,
10250 fold_build2_loc (loc, MINUS_EXPR, type,
10253 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10254 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10256 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10257 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10258 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10259 fold_convert_loc (loc, type, arg1));
10261 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10264 /* A - (-B) -> A + B */
10265 if (TREE_CODE (arg1) == NEGATE_EXPR)
10266 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10267 fold_convert_loc (loc, type,
10268 TREE_OPERAND (arg1, 0)));
10269 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10270 if (TREE_CODE (arg0) == NEGATE_EXPR
10271 && (FLOAT_TYPE_P (type)
10272 || INTEGRAL_TYPE_P (type))
10273 && negate_expr_p (arg1)
10274 && reorder_operands_p (arg0, arg1))
10275 return fold_build2_loc (loc, MINUS_EXPR, type,
10276 fold_convert_loc (loc, type,
10277 negate_expr (arg1)),
10278 fold_convert_loc (loc, type,
10279 TREE_OPERAND (arg0, 0)));
10280 /* Convert -A - 1 to ~A. */
10281 if (INTEGRAL_TYPE_P (type)
10282 && TREE_CODE (arg0) == NEGATE_EXPR
10283 && integer_onep (arg1)
10284 && !TYPE_OVERFLOW_TRAPS (type))
10285 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10286 fold_convert_loc (loc, type,
10287 TREE_OPERAND (arg0, 0)));
10289 /* Convert -1 - A to ~A. */
10290 if (INTEGRAL_TYPE_P (type)
10291 && integer_all_onesp (arg0))
10292 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10295 /* X - (X / CST) * CST is X % CST. */
10296 if (INTEGRAL_TYPE_P (type)
10297 && TREE_CODE (arg1) == MULT_EXPR
10298 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10299 && operand_equal_p (arg0,
10300 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10301 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10302 TREE_OPERAND (arg1, 1), 0))
10304 fold_convert_loc (loc, type,
10305 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10306 arg0, TREE_OPERAND (arg1, 1)));
10308 if (! FLOAT_TYPE_P (type))
10310 if (integer_zerop (arg0))
10311 return negate_expr (fold_convert_loc (loc, type, arg1));
10312 if (integer_zerop (arg1))
10313 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10315 /* Fold A - (A & B) into ~B & A. */
10316 if (!TREE_SIDE_EFFECTS (arg0)
10317 && TREE_CODE (arg1) == BIT_AND_EXPR)
10319 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10321 tree arg10 = fold_convert_loc (loc, type,
10322 TREE_OPERAND (arg1, 0));
10323 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10324 fold_build1_loc (loc, BIT_NOT_EXPR,
10326 fold_convert_loc (loc, type, arg0));
10328 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10330 tree arg11 = fold_convert_loc (loc,
10331 type, TREE_OPERAND (arg1, 1));
10332 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10333 fold_build1_loc (loc, BIT_NOT_EXPR,
10335 fold_convert_loc (loc, type, arg0));
10339 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10340 any power of 2 minus 1. */
10341 if (TREE_CODE (arg0) == BIT_AND_EXPR
10342 && TREE_CODE (arg1) == BIT_AND_EXPR
10343 && operand_equal_p (TREE_OPERAND (arg0, 0),
10344 TREE_OPERAND (arg1, 0), 0))
10346 tree mask0 = TREE_OPERAND (arg0, 1);
10347 tree mask1 = TREE_OPERAND (arg1, 1);
10348 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10350 if (operand_equal_p (tem, mask1, 0))
10352 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10353 TREE_OPERAND (arg0, 0), mask1);
10354 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10359 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10360 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10361 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10363 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10364 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10365 (-ARG1 + ARG0) reduces to -ARG1. */
10366 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10367 return negate_expr (fold_convert_loc (loc, type, arg1));
10369 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10370 __complex__ ( x, -y ). This is not the same for SNaNs or if
10371 signed zeros are involved. */
10372 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10373 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10374 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10376 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10377 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10378 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10379 bool arg0rz = false, arg0iz = false;
10380 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10381 || (arg0i && (arg0iz = real_zerop (arg0i))))
10383 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10384 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10385 if (arg0rz && arg1i && real_zerop (arg1i))
10387 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10389 : build1 (REALPART_EXPR, rtype, arg1));
10390 tree ip = arg0i ? arg0i
10391 : build1 (IMAGPART_EXPR, rtype, arg0);
10392 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10394 else if (arg0iz && arg1r && real_zerop (arg1r))
10396 tree rp = arg0r ? arg0r
10397 : build1 (REALPART_EXPR, rtype, arg0);
10398 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10400 : build1 (IMAGPART_EXPR, rtype, arg1));
10401 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10406 /* Fold &x - &x. This can happen from &x.foo - &x.
10407 This is unsafe for certain floats even in non-IEEE formats.
10408 In IEEE, it is unsafe because it does wrong for NaNs.
10409 Also note that operand_equal_p is always false if an operand
10412 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10413 && operand_equal_p (arg0, arg1, 0))
10414 return build_zero_cst (type);
10416 /* A - B -> A + (-B) if B is easily negatable. */
10417 if (negate_expr_p (arg1)
10418 && ((FLOAT_TYPE_P (type)
10419 /* Avoid this transformation if B is a positive REAL_CST. */
10420 && (TREE_CODE (arg1) != REAL_CST
10421 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10422 || INTEGRAL_TYPE_P (type)))
10423 return fold_build2_loc (loc, PLUS_EXPR, type,
10424 fold_convert_loc (loc, type, arg0),
10425 fold_convert_loc (loc, type,
10426 negate_expr (arg1)));
10428 /* Try folding difference of addresses. */
10430 HOST_WIDE_INT diff;
10432 if ((TREE_CODE (arg0) == ADDR_EXPR
10433 || TREE_CODE (arg1) == ADDR_EXPR)
10434 && ptr_difference_const (arg0, arg1, &diff))
10435 return build_int_cst_type (type, diff);
10438 /* Fold &a[i] - &a[j] to i-j. */
10439 if (TREE_CODE (arg0) == ADDR_EXPR
10440 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10441 && TREE_CODE (arg1) == ADDR_EXPR
10442 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10444 tree aref0 = TREE_OPERAND (arg0, 0);
10445 tree aref1 = TREE_OPERAND (arg1, 0);
10446 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10447 TREE_OPERAND (aref1, 0), 0))
10449 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10450 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10451 tree esz = array_ref_element_size (aref0);
10452 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10453 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10454 fold_convert_loc (loc, type, esz));
10459 if (FLOAT_TYPE_P (type)
10460 && flag_unsafe_math_optimizations
10461 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10462 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10463 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10466 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10467 same or one. Make sure type is not saturating.
10468 fold_plusminus_mult_expr will re-associate. */
10469 if ((TREE_CODE (arg0) == MULT_EXPR
10470 || TREE_CODE (arg1) == MULT_EXPR)
10471 && !TYPE_SATURATING (type)
10472 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10474 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10482 /* (-A) * (-B) -> A * B */
10483 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10484 return fold_build2_loc (loc, MULT_EXPR, type,
10485 fold_convert_loc (loc, type,
10486 TREE_OPERAND (arg0, 0)),
10487 fold_convert_loc (loc, type,
10488 negate_expr (arg1)));
10489 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10490 return fold_build2_loc (loc, MULT_EXPR, type,
10491 fold_convert_loc (loc, type,
10492 negate_expr (arg0)),
10493 fold_convert_loc (loc, type,
10494 TREE_OPERAND (arg1, 0)));
10496 if (! FLOAT_TYPE_P (type))
10498 if (integer_zerop (arg1))
10499 return omit_one_operand_loc (loc, type, arg1, arg0);
10500 if (integer_onep (arg1))
10501 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10502 /* Transform x * -1 into -x. Make sure to do the negation
10503 on the original operand with conversions not stripped
10504 because we can only strip non-sign-changing conversions. */
10505 if (integer_all_onesp (arg1))
10506 return fold_convert_loc (loc, type, negate_expr (op0));
10507 /* Transform x * -C into -x * C if x is easily negatable. */
10508 if (TREE_CODE (arg1) == INTEGER_CST
10509 && tree_int_cst_sgn (arg1) == -1
10510 && negate_expr_p (arg0)
10511 && (tem = negate_expr (arg1)) != arg1
10512 && !TREE_OVERFLOW (tem))
10513 return fold_build2_loc (loc, MULT_EXPR, type,
10514 fold_convert_loc (loc, type,
10515 negate_expr (arg0)),
10518 /* (a * (1 << b)) is (a << b) */
10519 if (TREE_CODE (arg1) == LSHIFT_EXPR
10520 && integer_onep (TREE_OPERAND (arg1, 0)))
10521 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10522 TREE_OPERAND (arg1, 1));
10523 if (TREE_CODE (arg0) == LSHIFT_EXPR
10524 && integer_onep (TREE_OPERAND (arg0, 0)))
10525 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10526 TREE_OPERAND (arg0, 1));
10528 /* (A + A) * C -> A * 2 * C */
10529 if (TREE_CODE (arg0) == PLUS_EXPR
10530 && TREE_CODE (arg1) == INTEGER_CST
10531 && operand_equal_p (TREE_OPERAND (arg0, 0),
10532 TREE_OPERAND (arg0, 1), 0))
10533 return fold_build2_loc (loc, MULT_EXPR, type,
10534 omit_one_operand_loc (loc, type,
10535 TREE_OPERAND (arg0, 0),
10536 TREE_OPERAND (arg0, 1)),
10537 fold_build2_loc (loc, MULT_EXPR, type,
10538 build_int_cst (type, 2) , arg1));
10540 strict_overflow_p = false;
10541 if (TREE_CODE (arg1) == INTEGER_CST
10542 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10543 &strict_overflow_p)))
10545 if (strict_overflow_p)
10546 fold_overflow_warning (("assuming signed overflow does not "
10547 "occur when simplifying "
10549 WARN_STRICT_OVERFLOW_MISC);
10550 return fold_convert_loc (loc, type, tem);
10553 /* Optimize z * conj(z) for integer complex numbers. */
10554 if (TREE_CODE (arg0) == CONJ_EXPR
10555 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10556 return fold_mult_zconjz (loc, type, arg1);
10557 if (TREE_CODE (arg1) == CONJ_EXPR
10558 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10559 return fold_mult_zconjz (loc, type, arg0);
10563 /* Maybe fold x * 0 to 0. The expressions aren't the same
10564 when x is NaN, since x * 0 is also NaN. Nor are they the
10565 same in modes with signed zeros, since multiplying a
10566 negative value by 0 gives -0, not +0. */
10567 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10568 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10569 && real_zerop (arg1))
10570 return omit_one_operand_loc (loc, type, arg1, arg0);
10571 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10572 Likewise for complex arithmetic with signed zeros. */
10573 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10574 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10575 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10576 && real_onep (arg1))
10577 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10579 /* Transform x * -1.0 into -x. */
10580 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10581 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10582 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10583 && real_minus_onep (arg1))
10584 return fold_convert_loc (loc, type, negate_expr (arg0));
10586 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10587 the result for floating point types due to rounding so it is applied
10588 only if -fassociative-math was specify. */
10589 if (flag_associative_math
10590 && TREE_CODE (arg0) == RDIV_EXPR
10591 && TREE_CODE (arg1) == REAL_CST
10592 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10594 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10597 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10598 TREE_OPERAND (arg0, 1));
10601 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10602 if (operand_equal_p (arg0, arg1, 0))
10604 tree tem = fold_strip_sign_ops (arg0);
10605 if (tem != NULL_TREE)
10607 tem = fold_convert_loc (loc, type, tem);
10608 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10612 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10613 This is not the same for NaNs or if signed zeros are
10615 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10616 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10617 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10618 && TREE_CODE (arg1) == COMPLEX_CST
10619 && real_zerop (TREE_REALPART (arg1)))
10621 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10622 if (real_onep (TREE_IMAGPART (arg1)))
10624 fold_build2_loc (loc, COMPLEX_EXPR, type,
10625 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10627 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10628 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10630 fold_build2_loc (loc, COMPLEX_EXPR, type,
10631 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10632 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10636 /* Optimize z * conj(z) for floating point complex numbers.
10637 Guarded by flag_unsafe_math_optimizations as non-finite
10638 imaginary components don't produce scalar results. */
10639 if (flag_unsafe_math_optimizations
10640 && TREE_CODE (arg0) == CONJ_EXPR
10641 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10642 return fold_mult_zconjz (loc, type, arg1);
10643 if (flag_unsafe_math_optimizations
10644 && TREE_CODE (arg1) == CONJ_EXPR
10645 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10646 return fold_mult_zconjz (loc, type, arg0);
10648 if (flag_unsafe_math_optimizations)
10650 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10651 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10653 /* Optimizations of root(...)*root(...). */
10654 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10657 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10658 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10660 /* Optimize sqrt(x)*sqrt(x) as x. */
10661 if (BUILTIN_SQRT_P (fcode0)
10662 && operand_equal_p (arg00, arg10, 0)
10663 && ! HONOR_SNANS (TYPE_MODE (type)))
10666 /* Optimize root(x)*root(y) as root(x*y). */
10667 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10668 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10669 return build_call_expr_loc (loc, rootfn, 1, arg);
10672 /* Optimize expN(x)*expN(y) as expN(x+y). */
10673 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10675 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10676 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10677 CALL_EXPR_ARG (arg0, 0),
10678 CALL_EXPR_ARG (arg1, 0));
10679 return build_call_expr_loc (loc, expfn, 1, arg);
10682 /* Optimizations of pow(...)*pow(...). */
10683 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10684 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10685 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10687 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10688 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10689 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10690 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10692 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10693 if (operand_equal_p (arg01, arg11, 0))
10695 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10696 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10698 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10701 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10702 if (operand_equal_p (arg00, arg10, 0))
10704 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10705 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10707 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10711 /* Optimize tan(x)*cos(x) as sin(x). */
10712 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10713 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10714 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10715 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10716 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10717 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10718 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10719 CALL_EXPR_ARG (arg1, 0), 0))
10721 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10723 if (sinfn != NULL_TREE)
10724 return build_call_expr_loc (loc, sinfn, 1,
10725 CALL_EXPR_ARG (arg0, 0));
10728 /* Optimize x*pow(x,c) as pow(x,c+1). */
10729 if (fcode1 == BUILT_IN_POW
10730 || fcode1 == BUILT_IN_POWF
10731 || fcode1 == BUILT_IN_POWL)
10733 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10734 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10735 if (TREE_CODE (arg11) == REAL_CST
10736 && !TREE_OVERFLOW (arg11)
10737 && operand_equal_p (arg0, arg10, 0))
10739 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10743 c = TREE_REAL_CST (arg11);
10744 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10745 arg = build_real (type, c);
10746 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10750 /* Optimize pow(x,c)*x as pow(x,c+1). */
10751 if (fcode0 == BUILT_IN_POW
10752 || fcode0 == BUILT_IN_POWF
10753 || fcode0 == BUILT_IN_POWL)
10755 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10756 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10757 if (TREE_CODE (arg01) == REAL_CST
10758 && !TREE_OVERFLOW (arg01)
10759 && operand_equal_p (arg1, arg00, 0))
10761 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10765 c = TREE_REAL_CST (arg01);
10766 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10767 arg = build_real (type, c);
10768 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10772 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10773 if (!in_gimple_form
10775 && operand_equal_p (arg0, arg1, 0))
10777 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10781 tree arg = build_real (type, dconst2);
10782 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10791 if (integer_all_onesp (arg1))
10792 return omit_one_operand_loc (loc, type, arg1, arg0);
10793 if (integer_zerop (arg1))
10794 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10795 if (operand_equal_p (arg0, arg1, 0))
10796 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10798 /* ~X | X is -1. */
10799 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10800 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10802 t1 = build_zero_cst (type);
10803 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10804 return omit_one_operand_loc (loc, type, t1, arg1);
10807 /* X | ~X is -1. */
10808 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10809 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10811 t1 = build_zero_cst (type);
10812 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10813 return omit_one_operand_loc (loc, type, t1, arg0);
10816 /* Canonicalize (X & C1) | C2. */
10817 if (TREE_CODE (arg0) == BIT_AND_EXPR
10818 && TREE_CODE (arg1) == INTEGER_CST
10819 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10821 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10822 int width = TYPE_PRECISION (type), w;
10823 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10824 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10825 hi2 = TREE_INT_CST_HIGH (arg1);
10826 lo2 = TREE_INT_CST_LOW (arg1);
10828 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10829 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10830 return omit_one_operand_loc (loc, type, arg1,
10831 TREE_OPERAND (arg0, 0));
10833 if (width > HOST_BITS_PER_WIDE_INT)
10835 mhi = (unsigned HOST_WIDE_INT) -1
10836 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10842 mlo = (unsigned HOST_WIDE_INT) -1
10843 >> (HOST_BITS_PER_WIDE_INT - width);
10846 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10847 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10848 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10849 TREE_OPERAND (arg0, 0), arg1);
10851 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10852 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10853 mode which allows further optimizations. */
10860 for (w = BITS_PER_UNIT;
10861 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10864 unsigned HOST_WIDE_INT mask
10865 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10866 if (((lo1 | lo2) & mask) == mask
10867 && (lo1 & ~mask) == 0 && hi1 == 0)
10874 if (hi3 != hi1 || lo3 != lo1)
10875 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10876 fold_build2_loc (loc, BIT_AND_EXPR, type,
10877 TREE_OPERAND (arg0, 0),
10878 build_int_cst_wide (type,
10883 /* (X & Y) | Y is (X, Y). */
10884 if (TREE_CODE (arg0) == BIT_AND_EXPR
10885 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10886 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10887 /* (X & Y) | X is (Y, X). */
10888 if (TREE_CODE (arg0) == BIT_AND_EXPR
10889 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10890 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10891 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10892 /* X | (X & Y) is (Y, X). */
10893 if (TREE_CODE (arg1) == BIT_AND_EXPR
10894 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10895 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10896 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10897 /* X | (Y & X) is (Y, X). */
10898 if (TREE_CODE (arg1) == BIT_AND_EXPR
10899 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10900 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10901 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10903 /* (X & ~Y) | (~X & Y) is X ^ Y */
10904 if (TREE_CODE (arg0) == BIT_AND_EXPR
10905 && TREE_CODE (arg1) == BIT_AND_EXPR)
10907 tree a0, a1, l0, l1, n0, n1;
10909 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10910 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10912 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10913 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10915 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10916 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10918 if ((operand_equal_p (n0, a0, 0)
10919 && operand_equal_p (n1, a1, 0))
10920 || (operand_equal_p (n0, a1, 0)
10921 && operand_equal_p (n1, a0, 0)))
10922 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
10925 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10926 if (t1 != NULL_TREE)
10929 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10931 This results in more efficient code for machines without a NAND
10932 instruction. Combine will canonicalize to the first form
10933 which will allow use of NAND instructions provided by the
10934 backend if they exist. */
10935 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10936 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10939 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10940 build2 (BIT_AND_EXPR, type,
10941 fold_convert_loc (loc, type,
10942 TREE_OPERAND (arg0, 0)),
10943 fold_convert_loc (loc, type,
10944 TREE_OPERAND (arg1, 0))));
10947 /* See if this can be simplified into a rotate first. If that
10948 is unsuccessful continue in the association code. */
10952 if (integer_zerop (arg1))
10953 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10954 if (integer_all_onesp (arg1))
10955 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10956 if (operand_equal_p (arg0, arg1, 0))
10957 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10959 /* ~X ^ X is -1. */
10960 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10961 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10963 t1 = build_zero_cst (type);
10964 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10965 return omit_one_operand_loc (loc, type, t1, arg1);
10968 /* X ^ ~X is -1. */
10969 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10970 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10972 t1 = build_zero_cst (type);
10973 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10974 return omit_one_operand_loc (loc, type, t1, arg0);
10977 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10978 with a constant, and the two constants have no bits in common,
10979 we should treat this as a BIT_IOR_EXPR since this may produce more
10980 simplifications. */
10981 if (TREE_CODE (arg0) == BIT_AND_EXPR
10982 && TREE_CODE (arg1) == BIT_AND_EXPR
10983 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10984 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10985 && integer_zerop (const_binop (BIT_AND_EXPR,
10986 TREE_OPERAND (arg0, 1),
10987 TREE_OPERAND (arg1, 1))))
10989 code = BIT_IOR_EXPR;
10993 /* (X | Y) ^ X -> Y & ~ X*/
10994 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10995 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10997 tree t2 = TREE_OPERAND (arg0, 1);
10998 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11000 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11001 fold_convert_loc (loc, type, t2),
11002 fold_convert_loc (loc, type, t1));
11006 /* (Y | X) ^ X -> Y & ~ X*/
11007 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11008 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11010 tree t2 = TREE_OPERAND (arg0, 0);
11011 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11013 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11014 fold_convert_loc (loc, type, t2),
11015 fold_convert_loc (loc, type, t1));
11019 /* X ^ (X | Y) -> Y & ~ X*/
11020 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11021 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11023 tree t2 = TREE_OPERAND (arg1, 1);
11024 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11026 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11027 fold_convert_loc (loc, type, t2),
11028 fold_convert_loc (loc, type, t1));
11032 /* X ^ (Y | X) -> Y & ~ X*/
11033 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11034 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11036 tree t2 = TREE_OPERAND (arg1, 0);
11037 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11039 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11040 fold_convert_loc (loc, type, t2),
11041 fold_convert_loc (loc, type, t1));
11045 /* Convert ~X ^ ~Y to X ^ Y. */
11046 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11047 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11048 return fold_build2_loc (loc, code, type,
11049 fold_convert_loc (loc, type,
11050 TREE_OPERAND (arg0, 0)),
11051 fold_convert_loc (loc, type,
11052 TREE_OPERAND (arg1, 0)));
11054 /* Convert ~X ^ C to X ^ ~C. */
11055 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11056 && TREE_CODE (arg1) == INTEGER_CST)
11057 return fold_build2_loc (loc, code, type,
11058 fold_convert_loc (loc, type,
11059 TREE_OPERAND (arg0, 0)),
11060 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11062 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11063 if (TREE_CODE (arg0) == BIT_AND_EXPR
11064 && integer_onep (TREE_OPERAND (arg0, 1))
11065 && integer_onep (arg1))
11066 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11067 build_int_cst (TREE_TYPE (arg0), 0));
11069 /* Fold (X & Y) ^ Y as ~X & Y. */
11070 if (TREE_CODE (arg0) == BIT_AND_EXPR
11071 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11073 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11074 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11075 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11076 fold_convert_loc (loc, type, arg1));
11078 /* Fold (X & Y) ^ X as ~Y & X. */
11079 if (TREE_CODE (arg0) == BIT_AND_EXPR
11080 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11081 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11083 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11084 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11085 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11086 fold_convert_loc (loc, type, arg1));
11088 /* Fold X ^ (X & Y) as X & ~Y. */
11089 if (TREE_CODE (arg1) == BIT_AND_EXPR
11090 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11092 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11093 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11094 fold_convert_loc (loc, type, arg0),
11095 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11097 /* Fold X ^ (Y & X) as ~Y & X. */
11098 if (TREE_CODE (arg1) == BIT_AND_EXPR
11099 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11100 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11102 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11103 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11104 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11105 fold_convert_loc (loc, type, arg0));
11108 /* See if this can be simplified into a rotate first. If that
11109 is unsuccessful continue in the association code. */
11113 if (integer_all_onesp (arg1))
11114 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11115 if (integer_zerop (arg1))
11116 return omit_one_operand_loc (loc, type, arg1, arg0);
11117 if (operand_equal_p (arg0, arg1, 0))
11118 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11120 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11121 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11122 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11123 || (TREE_CODE (arg0) == EQ_EXPR
11124 && integer_zerop (TREE_OPERAND (arg0, 1))))
11125 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11126 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11128 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11129 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11130 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11131 || (TREE_CODE (arg1) == EQ_EXPR
11132 && integer_zerop (TREE_OPERAND (arg1, 1))))
11133 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11134 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11136 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11137 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11138 && TREE_CODE (arg1) == INTEGER_CST
11139 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11141 tree tmp1 = fold_convert_loc (loc, type, arg1);
11142 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11143 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11144 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11145 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11147 fold_convert_loc (loc, type,
11148 fold_build2_loc (loc, BIT_IOR_EXPR,
11149 type, tmp2, tmp3));
11152 /* (X | Y) & Y is (X, Y). */
11153 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11154 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11155 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11156 /* (X | Y) & X is (Y, X). */
11157 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11158 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11159 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11160 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11161 /* X & (X | Y) is (Y, X). */
11162 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11163 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11164 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11165 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11166 /* X & (Y | X) is (Y, X). */
11167 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11168 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11169 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11170 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11172 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11173 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11174 && integer_onep (TREE_OPERAND (arg0, 1))
11175 && integer_onep (arg1))
11177 tem = TREE_OPERAND (arg0, 0);
11178 return fold_build2_loc (loc, EQ_EXPR, type,
11179 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11180 build_int_cst (TREE_TYPE (tem), 1)),
11181 build_int_cst (TREE_TYPE (tem), 0));
11183 /* Fold ~X & 1 as (X & 1) == 0. */
11184 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11185 && integer_onep (arg1))
11187 tem = TREE_OPERAND (arg0, 0);
11188 return fold_build2_loc (loc, EQ_EXPR, type,
11189 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11190 build_int_cst (TREE_TYPE (tem), 1)),
11191 build_int_cst (TREE_TYPE (tem), 0));
11193 /* Fold !X & 1 as X == 0. */
11194 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11195 && integer_onep (arg1))
11197 tem = TREE_OPERAND (arg0, 0);
11198 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11199 build_int_cst (TREE_TYPE (tem), 0));
11202 /* Fold (X ^ Y) & Y as ~X & Y. */
11203 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11204 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11206 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11207 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11208 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11209 fold_convert_loc (loc, type, arg1));
11211 /* Fold (X ^ Y) & X as ~Y & X. */
11212 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11213 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11214 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11216 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11217 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11218 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11219 fold_convert_loc (loc, type, arg1));
11221 /* Fold X & (X ^ Y) as X & ~Y. */
11222 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11223 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11225 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11226 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11227 fold_convert_loc (loc, type, arg0),
11228 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11230 /* Fold X & (Y ^ X) as ~Y & X. */
11231 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11232 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11233 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11235 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11236 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11237 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11238 fold_convert_loc (loc, type, arg0));
11241 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11242 ((A & N) + B) & M -> (A + B) & M
11243 Similarly if (N & M) == 0,
11244 ((A | N) + B) & M -> (A + B) & M
11245 and for - instead of + (or unary - instead of +)
11246 and/or ^ instead of |.
11247 If B is constant and (B & M) == 0, fold into A & M. */
11248 if (host_integerp (arg1, 1))
11250 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11251 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11252 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11253 && (TREE_CODE (arg0) == PLUS_EXPR
11254 || TREE_CODE (arg0) == MINUS_EXPR
11255 || TREE_CODE (arg0) == NEGATE_EXPR)
11256 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11257 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11261 unsigned HOST_WIDE_INT cst0;
11263 /* Now we know that arg0 is (C + D) or (C - D) or
11264 -C and arg1 (M) is == (1LL << cst) - 1.
11265 Store C into PMOP[0] and D into PMOP[1]. */
11266 pmop[0] = TREE_OPERAND (arg0, 0);
11268 if (TREE_CODE (arg0) != NEGATE_EXPR)
11270 pmop[1] = TREE_OPERAND (arg0, 1);
11274 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11275 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11279 for (; which >= 0; which--)
11280 switch (TREE_CODE (pmop[which]))
11285 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11288 /* tree_low_cst not used, because we don't care about
11290 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11292 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11297 else if (cst0 != 0)
11299 /* If C or D is of the form (A & N) where
11300 (N & M) == M, or of the form (A | N) or
11301 (A ^ N) where (N & M) == 0, replace it with A. */
11302 pmop[which] = TREE_OPERAND (pmop[which], 0);
11305 /* If C or D is a N where (N & M) == 0, it can be
11306 omitted (assumed 0). */
11307 if ((TREE_CODE (arg0) == PLUS_EXPR
11308 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11309 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11310 pmop[which] = NULL;
11316 /* Only build anything new if we optimized one or both arguments
11318 if (pmop[0] != TREE_OPERAND (arg0, 0)
11319 || (TREE_CODE (arg0) != NEGATE_EXPR
11320 && pmop[1] != TREE_OPERAND (arg0, 1)))
11322 tree utype = TREE_TYPE (arg0);
11323 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11325 /* Perform the operations in a type that has defined
11326 overflow behavior. */
11327 utype = unsigned_type_for (TREE_TYPE (arg0));
11328 if (pmop[0] != NULL)
11329 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11330 if (pmop[1] != NULL)
11331 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11334 if (TREE_CODE (arg0) == NEGATE_EXPR)
11335 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11336 else if (TREE_CODE (arg0) == PLUS_EXPR)
11338 if (pmop[0] != NULL && pmop[1] != NULL)
11339 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11341 else if (pmop[0] != NULL)
11343 else if (pmop[1] != NULL)
11346 return build_int_cst (type, 0);
11348 else if (pmop[0] == NULL)
11349 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11351 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11353 /* TEM is now the new binary +, - or unary - replacement. */
11354 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11355 fold_convert_loc (loc, utype, arg1));
11356 return fold_convert_loc (loc, type, tem);
11361 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11362 if (t1 != NULL_TREE)
11364 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11365 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11366 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11369 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11371 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11372 && (~TREE_INT_CST_LOW (arg1)
11373 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11375 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11378 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11380 This results in more efficient code for machines without a NOR
11381 instruction. Combine will canonicalize to the first form
11382 which will allow use of NOR instructions provided by the
11383 backend if they exist. */
11384 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11385 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11387 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11388 build2 (BIT_IOR_EXPR, type,
11389 fold_convert_loc (loc, type,
11390 TREE_OPERAND (arg0, 0)),
11391 fold_convert_loc (loc, type,
11392 TREE_OPERAND (arg1, 0))));
11395 /* If arg0 is derived from the address of an object or function, we may
11396 be able to fold this expression using the object or function's
11398 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11400 unsigned HOST_WIDE_INT modulus, residue;
11401 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11403 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11404 integer_onep (arg1));
11406 /* This works because modulus is a power of 2. If this weren't the
11407 case, we'd have to replace it by its greatest power-of-2
11408 divisor: modulus & -modulus. */
11410 return build_int_cst (type, residue & low);
11413 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11414 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11415 if the new mask might be further optimized. */
11416 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11417 || TREE_CODE (arg0) == RSHIFT_EXPR)
11418 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11419 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11420 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11421 < TYPE_PRECISION (TREE_TYPE (arg0))
11422 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11423 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11425 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11426 unsigned HOST_WIDE_INT mask
11427 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11428 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11429 tree shift_type = TREE_TYPE (arg0);
11431 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11432 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11433 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11434 && TYPE_PRECISION (TREE_TYPE (arg0))
11435 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11437 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11438 tree arg00 = TREE_OPERAND (arg0, 0);
11439 /* See if more bits can be proven as zero because of
11441 if (TREE_CODE (arg00) == NOP_EXPR
11442 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11444 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11445 if (TYPE_PRECISION (inner_type)
11446 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11447 && TYPE_PRECISION (inner_type) < prec)
11449 prec = TYPE_PRECISION (inner_type);
11450 /* See if we can shorten the right shift. */
11452 shift_type = inner_type;
11455 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11456 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11457 zerobits <<= prec - shiftc;
11458 /* For arithmetic shift if sign bit could be set, zerobits
11459 can contain actually sign bits, so no transformation is
11460 possible, unless MASK masks them all away. In that
11461 case the shift needs to be converted into logical shift. */
11462 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11463 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11465 if ((mask & zerobits) == 0)
11466 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11472 /* ((X << 16) & 0xff00) is (X, 0). */
11473 if ((mask & zerobits) == mask)
11474 return omit_one_operand_loc (loc, type,
11475 build_int_cst (type, 0), arg0);
11477 newmask = mask | zerobits;
11478 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11482 /* Only do the transformation if NEWMASK is some integer
11484 for (prec = BITS_PER_UNIT;
11485 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11486 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11488 if (prec < HOST_BITS_PER_WIDE_INT
11489 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11493 if (shift_type != TREE_TYPE (arg0))
11495 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11496 fold_convert_loc (loc, shift_type,
11497 TREE_OPERAND (arg0, 0)),
11498 TREE_OPERAND (arg0, 1));
11499 tem = fold_convert_loc (loc, type, tem);
11503 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11504 if (!tree_int_cst_equal (newmaskt, arg1))
11505 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11513 /* Don't touch a floating-point divide by zero unless the mode
11514 of the constant can represent infinity. */
11515 if (TREE_CODE (arg1) == REAL_CST
11516 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11517 && real_zerop (arg1))
11520 /* Optimize A / A to 1.0 if we don't care about
11521 NaNs or Infinities. Skip the transformation
11522 for non-real operands. */
11523 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11524 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11525 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11526 && operand_equal_p (arg0, arg1, 0))
11528 tree r = build_real (TREE_TYPE (arg0), dconst1);
11530 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11533 /* The complex version of the above A / A optimization. */
11534 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11535 && operand_equal_p (arg0, arg1, 0))
11537 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11538 if (! HONOR_NANS (TYPE_MODE (elem_type))
11539 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11541 tree r = build_real (elem_type, dconst1);
11542 /* omit_two_operands will call fold_convert for us. */
11543 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11547 /* (-A) / (-B) -> A / B */
11548 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11549 return fold_build2_loc (loc, RDIV_EXPR, type,
11550 TREE_OPERAND (arg0, 0),
11551 negate_expr (arg1));
11552 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11553 return fold_build2_loc (loc, RDIV_EXPR, type,
11554 negate_expr (arg0),
11555 TREE_OPERAND (arg1, 0));
11557 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11558 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11559 && real_onep (arg1))
11560 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11562 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11563 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11564 && real_minus_onep (arg1))
11565 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11566 negate_expr (arg0)));
11568 /* If ARG1 is a constant, we can convert this to a multiply by the
11569 reciprocal. This does not have the same rounding properties,
11570 so only do this if -freciprocal-math. We can actually
11571 always safely do it if ARG1 is a power of two, but it's hard to
11572 tell if it is or not in a portable manner. */
11573 if (TREE_CODE (arg1) == REAL_CST)
11575 if (flag_reciprocal_math
11576 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11578 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11579 /* Find the reciprocal if optimizing and the result is exact. */
11583 r = TREE_REAL_CST (arg1);
11584 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11586 tem = build_real (type, r);
11587 return fold_build2_loc (loc, MULT_EXPR, type,
11588 fold_convert_loc (loc, type, arg0), tem);
11592 /* Convert A/B/C to A/(B*C). */
11593 if (flag_reciprocal_math
11594 && TREE_CODE (arg0) == RDIV_EXPR)
11595 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11596 fold_build2_loc (loc, MULT_EXPR, type,
11597 TREE_OPERAND (arg0, 1), arg1));
11599 /* Convert A/(B/C) to (A/B)*C. */
11600 if (flag_reciprocal_math
11601 && TREE_CODE (arg1) == RDIV_EXPR)
11602 return fold_build2_loc (loc, MULT_EXPR, type,
11603 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11604 TREE_OPERAND (arg1, 0)),
11605 TREE_OPERAND (arg1, 1));
11607 /* Convert C1/(X*C2) into (C1/C2)/X. */
11608 if (flag_reciprocal_math
11609 && TREE_CODE (arg1) == MULT_EXPR
11610 && TREE_CODE (arg0) == REAL_CST
11611 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11613 tree tem = const_binop (RDIV_EXPR, arg0,
11614 TREE_OPERAND (arg1, 1));
11616 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11617 TREE_OPERAND (arg1, 0));
11620 if (flag_unsafe_math_optimizations)
11622 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11623 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11625 /* Optimize sin(x)/cos(x) as tan(x). */
11626 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11627 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11628 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11629 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11630 CALL_EXPR_ARG (arg1, 0), 0))
11632 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11634 if (tanfn != NULL_TREE)
11635 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11638 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11639 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11640 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11641 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11642 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11643 CALL_EXPR_ARG (arg1, 0), 0))
11645 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11647 if (tanfn != NULL_TREE)
11649 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11650 CALL_EXPR_ARG (arg0, 0));
11651 return fold_build2_loc (loc, RDIV_EXPR, type,
11652 build_real (type, dconst1), tmp);
11656 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11657 NaNs or Infinities. */
11658 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11659 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11660 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11662 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11663 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11665 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11666 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11667 && operand_equal_p (arg00, arg01, 0))
11669 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11671 if (cosfn != NULL_TREE)
11672 return build_call_expr_loc (loc, cosfn, 1, arg00);
11676 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11677 NaNs or Infinities. */
11678 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11679 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11680 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11682 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11683 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11685 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11686 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11687 && operand_equal_p (arg00, arg01, 0))
11689 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11691 if (cosfn != NULL_TREE)
11693 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11694 return fold_build2_loc (loc, RDIV_EXPR, type,
11695 build_real (type, dconst1),
11701 /* Optimize pow(x,c)/x as pow(x,c-1). */
11702 if (fcode0 == BUILT_IN_POW
11703 || fcode0 == BUILT_IN_POWF
11704 || fcode0 == BUILT_IN_POWL)
11706 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11707 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11708 if (TREE_CODE (arg01) == REAL_CST
11709 && !TREE_OVERFLOW (arg01)
11710 && operand_equal_p (arg1, arg00, 0))
11712 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11716 c = TREE_REAL_CST (arg01);
11717 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11718 arg = build_real (type, c);
11719 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11723 /* Optimize a/root(b/c) into a*root(c/b). */
11724 if (BUILTIN_ROOT_P (fcode1))
11726 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11728 if (TREE_CODE (rootarg) == RDIV_EXPR)
11730 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11731 tree b = TREE_OPERAND (rootarg, 0);
11732 tree c = TREE_OPERAND (rootarg, 1);
11734 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11736 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11737 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11741 /* Optimize x/expN(y) into x*expN(-y). */
11742 if (BUILTIN_EXPONENT_P (fcode1))
11744 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11745 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11746 arg1 = build_call_expr_loc (loc,
11748 fold_convert_loc (loc, type, arg));
11749 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11752 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11753 if (fcode1 == BUILT_IN_POW
11754 || fcode1 == BUILT_IN_POWF
11755 || fcode1 == BUILT_IN_POWL)
11757 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11758 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11759 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11760 tree neg11 = fold_convert_loc (loc, type,
11761 negate_expr (arg11));
11762 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11763 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11768 case TRUNC_DIV_EXPR:
11769 /* Optimize (X & (-A)) / A where A is a power of 2,
11771 if (TREE_CODE (arg0) == BIT_AND_EXPR
11772 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11773 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11775 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11776 arg1, TREE_OPERAND (arg0, 1));
11777 if (sum && integer_zerop (sum)) {
11778 unsigned long pow2;
11780 if (TREE_INT_CST_LOW (arg1))
11781 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11783 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11784 + HOST_BITS_PER_WIDE_INT;
11786 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11787 TREE_OPERAND (arg0, 0),
11788 build_int_cst (integer_type_node, pow2));
11794 case FLOOR_DIV_EXPR:
11795 /* Simplify A / (B << N) where A and B are positive and B is
11796 a power of 2, to A >> (N + log2(B)). */
11797 strict_overflow_p = false;
11798 if (TREE_CODE (arg1) == LSHIFT_EXPR
11799 && (TYPE_UNSIGNED (type)
11800 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11802 tree sval = TREE_OPERAND (arg1, 0);
11803 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11805 tree sh_cnt = TREE_OPERAND (arg1, 1);
11806 unsigned long pow2;
11808 if (TREE_INT_CST_LOW (sval))
11809 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11811 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11812 + HOST_BITS_PER_WIDE_INT;
11814 if (strict_overflow_p)
11815 fold_overflow_warning (("assuming signed overflow does not "
11816 "occur when simplifying A / (B << N)"),
11817 WARN_STRICT_OVERFLOW_MISC);
11819 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11821 build_int_cst (TREE_TYPE (sh_cnt),
11823 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11824 fold_convert_loc (loc, type, arg0), sh_cnt);
11828 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11829 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11830 if (INTEGRAL_TYPE_P (type)
11831 && TYPE_UNSIGNED (type)
11832 && code == FLOOR_DIV_EXPR)
11833 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11837 case ROUND_DIV_EXPR:
11838 case CEIL_DIV_EXPR:
11839 case EXACT_DIV_EXPR:
11840 if (integer_onep (arg1))
11841 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11842 if (integer_zerop (arg1))
11844 /* X / -1 is -X. */
11845 if (!TYPE_UNSIGNED (type)
11846 && TREE_CODE (arg1) == INTEGER_CST
11847 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11848 && TREE_INT_CST_HIGH (arg1) == -1)
11849 return fold_convert_loc (loc, type, negate_expr (arg0));
11851 /* Convert -A / -B to A / B when the type is signed and overflow is
11853 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11854 && TREE_CODE (arg0) == NEGATE_EXPR
11855 && negate_expr_p (arg1))
11857 if (INTEGRAL_TYPE_P (type))
11858 fold_overflow_warning (("assuming signed overflow does not occur "
11859 "when distributing negation across "
11861 WARN_STRICT_OVERFLOW_MISC);
11862 return fold_build2_loc (loc, code, type,
11863 fold_convert_loc (loc, type,
11864 TREE_OPERAND (arg0, 0)),
11865 fold_convert_loc (loc, type,
11866 negate_expr (arg1)));
11868 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11869 && TREE_CODE (arg1) == NEGATE_EXPR
11870 && negate_expr_p (arg0))
11872 if (INTEGRAL_TYPE_P (type))
11873 fold_overflow_warning (("assuming signed overflow does not occur "
11874 "when distributing negation across "
11876 WARN_STRICT_OVERFLOW_MISC);
11877 return fold_build2_loc (loc, code, type,
11878 fold_convert_loc (loc, type,
11879 negate_expr (arg0)),
11880 fold_convert_loc (loc, type,
11881 TREE_OPERAND (arg1, 0)));
11884 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11885 operation, EXACT_DIV_EXPR.
11887 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11888 At one time others generated faster code, it's not clear if they do
11889 after the last round to changes to the DIV code in expmed.c. */
11890 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11891 && multiple_of_p (type, arg0, arg1))
11892 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11894 strict_overflow_p = false;
11895 if (TREE_CODE (arg1) == INTEGER_CST
11896 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11897 &strict_overflow_p)))
11899 if (strict_overflow_p)
11900 fold_overflow_warning (("assuming signed overflow does not occur "
11901 "when simplifying division"),
11902 WARN_STRICT_OVERFLOW_MISC);
11903 return fold_convert_loc (loc, type, tem);
11908 case CEIL_MOD_EXPR:
11909 case FLOOR_MOD_EXPR:
11910 case ROUND_MOD_EXPR:
11911 case TRUNC_MOD_EXPR:
11912 /* X % 1 is always zero, but be sure to preserve any side
11914 if (integer_onep (arg1))
11915 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11917 /* X % 0, return X % 0 unchanged so that we can get the
11918 proper warnings and errors. */
11919 if (integer_zerop (arg1))
11922 /* 0 % X is always zero, but be sure to preserve any side
11923 effects in X. Place this after checking for X == 0. */
11924 if (integer_zerop (arg0))
11925 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11927 /* X % -1 is zero. */
11928 if (!TYPE_UNSIGNED (type)
11929 && TREE_CODE (arg1) == INTEGER_CST
11930 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11931 && TREE_INT_CST_HIGH (arg1) == -1)
11932 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11934 /* X % -C is the same as X % C. */
11935 if (code == TRUNC_MOD_EXPR
11936 && !TYPE_UNSIGNED (type)
11937 && TREE_CODE (arg1) == INTEGER_CST
11938 && !TREE_OVERFLOW (arg1)
11939 && TREE_INT_CST_HIGH (arg1) < 0
11940 && !TYPE_OVERFLOW_TRAPS (type)
11941 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11942 && !sign_bit_p (arg1, arg1))
11943 return fold_build2_loc (loc, code, type,
11944 fold_convert_loc (loc, type, arg0),
11945 fold_convert_loc (loc, type,
11946 negate_expr (arg1)));
11948 /* X % -Y is the same as X % Y. */
11949 if (code == TRUNC_MOD_EXPR
11950 && !TYPE_UNSIGNED (type)
11951 && TREE_CODE (arg1) == NEGATE_EXPR
11952 && !TYPE_OVERFLOW_TRAPS (type))
11953 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11954 fold_convert_loc (loc, type,
11955 TREE_OPERAND (arg1, 0)));
11957 strict_overflow_p = false;
11958 if (TREE_CODE (arg1) == INTEGER_CST
11959 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11960 &strict_overflow_p)))
11962 if (strict_overflow_p)
11963 fold_overflow_warning (("assuming signed overflow does not occur "
11964 "when simplifying modulus"),
11965 WARN_STRICT_OVERFLOW_MISC);
11966 return fold_convert_loc (loc, type, tem);
11969 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11970 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11971 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11972 && (TYPE_UNSIGNED (type)
11973 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11976 /* Also optimize A % (C << N) where C is a power of 2,
11977 to A & ((C << N) - 1). */
11978 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11979 c = TREE_OPERAND (arg1, 0);
11981 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11984 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11985 build_int_cst (TREE_TYPE (arg1), 1));
11986 if (strict_overflow_p)
11987 fold_overflow_warning (("assuming signed overflow does not "
11988 "occur when simplifying "
11989 "X % (power of two)"),
11990 WARN_STRICT_OVERFLOW_MISC);
11991 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11992 fold_convert_loc (loc, type, arg0),
11993 fold_convert_loc (loc, type, mask));
12001 if (integer_all_onesp (arg0))
12002 return omit_one_operand_loc (loc, type, arg0, arg1);
12006 /* Optimize -1 >> x for arithmetic right shifts. */
12007 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12008 && tree_expr_nonnegative_p (arg1))
12009 return omit_one_operand_loc (loc, type, arg0, arg1);
12010 /* ... fall through ... */
12014 if (integer_zerop (arg1))
12015 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12016 if (integer_zerop (arg0))
12017 return omit_one_operand_loc (loc, type, arg0, arg1);
12019 /* Since negative shift count is not well-defined,
12020 don't try to compute it in the compiler. */
12021 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12024 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12025 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12026 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12027 && host_integerp (TREE_OPERAND (arg0, 1), false)
12028 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12030 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12031 + TREE_INT_CST_LOW (arg1));
12033 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12034 being well defined. */
12035 if (low >= TYPE_PRECISION (type))
12037 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12038 low = low % TYPE_PRECISION (type);
12039 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12040 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12041 TREE_OPERAND (arg0, 0));
12043 low = TYPE_PRECISION (type) - 1;
12046 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12047 build_int_cst (type, low));
12050 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12051 into x & ((unsigned)-1 >> c) for unsigned types. */
12052 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12053 || (TYPE_UNSIGNED (type)
12054 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12055 && host_integerp (arg1, false)
12056 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12057 && host_integerp (TREE_OPERAND (arg0, 1), false)
12058 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12060 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12061 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12067 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12069 lshift = build_int_cst (type, -1);
12070 lshift = int_const_binop (code, lshift, arg1);
12072 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12076 /* Rewrite an LROTATE_EXPR by a constant into an
12077 RROTATE_EXPR by a new constant. */
12078 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12080 tree tem = build_int_cst (TREE_TYPE (arg1),
12081 TYPE_PRECISION (type));
12082 tem = const_binop (MINUS_EXPR, tem, arg1);
12083 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12086 /* If we have a rotate of a bit operation with the rotate count and
12087 the second operand of the bit operation both constant,
12088 permute the two operations. */
12089 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12090 && (TREE_CODE (arg0) == BIT_AND_EXPR
12091 || TREE_CODE (arg0) == BIT_IOR_EXPR
12092 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12093 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12094 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12095 fold_build2_loc (loc, code, type,
12096 TREE_OPERAND (arg0, 0), arg1),
12097 fold_build2_loc (loc, code, type,
12098 TREE_OPERAND (arg0, 1), arg1));
12100 /* Two consecutive rotates adding up to the precision of the
12101 type can be ignored. */
12102 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12103 && TREE_CODE (arg0) == RROTATE_EXPR
12104 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12105 && TREE_INT_CST_HIGH (arg1) == 0
12106 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12107 && ((TREE_INT_CST_LOW (arg1)
12108 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12109 == (unsigned int) TYPE_PRECISION (type)))
12110 return TREE_OPERAND (arg0, 0);
12112 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12113 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12114 if the latter can be further optimized. */
12115 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12116 && TREE_CODE (arg0) == BIT_AND_EXPR
12117 && TREE_CODE (arg1) == INTEGER_CST
12118 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12120 tree mask = fold_build2_loc (loc, code, type,
12121 fold_convert_loc (loc, type,
12122 TREE_OPERAND (arg0, 1)),
12124 tree shift = fold_build2_loc (loc, code, type,
12125 fold_convert_loc (loc, type,
12126 TREE_OPERAND (arg0, 0)),
12128 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12136 if (operand_equal_p (arg0, arg1, 0))
12137 return omit_one_operand_loc (loc, type, arg0, arg1);
12138 if (INTEGRAL_TYPE_P (type)
12139 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12140 return omit_one_operand_loc (loc, type, arg1, arg0);
12141 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12147 if (operand_equal_p (arg0, arg1, 0))
12148 return omit_one_operand_loc (loc, type, arg0, arg1);
12149 if (INTEGRAL_TYPE_P (type)
12150 && TYPE_MAX_VALUE (type)
12151 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12152 return omit_one_operand_loc (loc, type, arg1, arg0);
12153 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12158 case TRUTH_ANDIF_EXPR:
12159 /* Note that the operands of this must be ints
12160 and their values must be 0 or 1.
12161 ("true" is a fixed value perhaps depending on the language.) */
12162 /* If first arg is constant zero, return it. */
12163 if (integer_zerop (arg0))
12164 return fold_convert_loc (loc, type, arg0);
12165 case TRUTH_AND_EXPR:
12166 /* If either arg is constant true, drop it. */
12167 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12168 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12169 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12170 /* Preserve sequence points. */
12171 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12172 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12173 /* If second arg is constant zero, result is zero, but first arg
12174 must be evaluated. */
12175 if (integer_zerop (arg1))
12176 return omit_one_operand_loc (loc, type, arg1, arg0);
12177 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12178 case will be handled here. */
12179 if (integer_zerop (arg0))
12180 return omit_one_operand_loc (loc, type, arg0, arg1);
12182 /* !X && X is always false. */
12183 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12184 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12185 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12186 /* X && !X is always false. */
12187 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12188 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12189 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12191 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12192 means A >= Y && A != MAX, but in this case we know that
12195 if (!TREE_SIDE_EFFECTS (arg0)
12196 && !TREE_SIDE_EFFECTS (arg1))
12198 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12199 if (tem && !operand_equal_p (tem, arg0, 0))
12200 return fold_build2_loc (loc, code, type, tem, arg1);
12202 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12203 if (tem && !operand_equal_p (tem, arg1, 0))
12204 return fold_build2_loc (loc, code, type, arg0, tem);
12207 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12213 case TRUTH_ORIF_EXPR:
12214 /* Note that the operands of this must be ints
12215 and their values must be 0 or true.
12216 ("true" is a fixed value perhaps depending on the language.) */
12217 /* If first arg is constant true, return it. */
12218 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12219 return fold_convert_loc (loc, type, arg0);
12220 case TRUTH_OR_EXPR:
12221 /* If either arg is constant zero, drop it. */
12222 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12223 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12224 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12225 /* Preserve sequence points. */
12226 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12227 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12228 /* If second arg is constant true, result is true, but we must
12229 evaluate first arg. */
12230 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12231 return omit_one_operand_loc (loc, type, arg1, arg0);
12232 /* Likewise for first arg, but note this only occurs here for
12234 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12235 return omit_one_operand_loc (loc, type, arg0, arg1);
12237 /* !X || X is always true. */
12238 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12239 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12240 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12241 /* X || !X is always true. */
12242 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12243 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12244 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12246 /* (X && !Y) || (!X && Y) is X ^ Y */
12247 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12248 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12250 tree a0, a1, l0, l1, n0, n1;
12252 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12253 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12255 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12256 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12258 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12259 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12261 if ((operand_equal_p (n0, a0, 0)
12262 && operand_equal_p (n1, a1, 0))
12263 || (operand_equal_p (n0, a1, 0)
12264 && operand_equal_p (n1, a0, 0)))
12265 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12268 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12274 case TRUTH_XOR_EXPR:
12275 /* If the second arg is constant zero, drop it. */
12276 if (integer_zerop (arg1))
12277 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12278 /* If the second arg is constant true, this is a logical inversion. */
12279 if (integer_onep (arg1))
12281 /* Only call invert_truthvalue if operand is a truth value. */
12282 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12283 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12285 tem = invert_truthvalue_loc (loc, arg0);
12286 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12288 /* Identical arguments cancel to zero. */
12289 if (operand_equal_p (arg0, arg1, 0))
12290 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12292 /* !X ^ X is always true. */
12293 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12294 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12295 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12297 /* X ^ !X is always true. */
12298 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12299 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12300 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12309 tem = fold_comparison (loc, code, type, op0, op1);
12310 if (tem != NULL_TREE)
12313 /* bool_var != 0 becomes bool_var. */
12314 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12315 && code == NE_EXPR)
12316 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12318 /* bool_var == 1 becomes bool_var. */
12319 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12320 && code == EQ_EXPR)
12321 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12323 /* bool_var != 1 becomes !bool_var. */
12324 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12325 && code == NE_EXPR)
12326 return fold_convert_loc (loc, type,
12327 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12328 TREE_TYPE (arg0), arg0));
12330 /* bool_var == 0 becomes !bool_var. */
12331 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12332 && code == EQ_EXPR)
12333 return fold_convert_loc (loc, type,
12334 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12335 TREE_TYPE (arg0), arg0));
12337 /* !exp != 0 becomes !exp */
12338 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12339 && code == NE_EXPR)
12340 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12342 /* If this is an equality comparison of the address of two non-weak,
12343 unaliased symbols neither of which are extern (since we do not
12344 have access to attributes for externs), then we know the result. */
12345 if (TREE_CODE (arg0) == ADDR_EXPR
12346 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12347 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12348 && ! lookup_attribute ("alias",
12349 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12350 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12351 && TREE_CODE (arg1) == ADDR_EXPR
12352 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12353 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12354 && ! lookup_attribute ("alias",
12355 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12356 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12358 /* We know that we're looking at the address of two
12359 non-weak, unaliased, static _DECL nodes.
12361 It is both wasteful and incorrect to call operand_equal_p
12362 to compare the two ADDR_EXPR nodes. It is wasteful in that
12363 all we need to do is test pointer equality for the arguments
12364 to the two ADDR_EXPR nodes. It is incorrect to use
12365 operand_equal_p as that function is NOT equivalent to a
12366 C equality test. It can in fact return false for two
12367 objects which would test as equal using the C equality
12369 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12370 return constant_boolean_node (equal
12371 ? code == EQ_EXPR : code != EQ_EXPR,
12375 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12376 a MINUS_EXPR of a constant, we can convert it into a comparison with
12377 a revised constant as long as no overflow occurs. */
12378 if (TREE_CODE (arg1) == INTEGER_CST
12379 && (TREE_CODE (arg0) == PLUS_EXPR
12380 || TREE_CODE (arg0) == MINUS_EXPR)
12381 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12382 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12383 ? MINUS_EXPR : PLUS_EXPR,
12384 fold_convert_loc (loc, TREE_TYPE (arg0),
12386 TREE_OPERAND (arg0, 1)))
12387 && !TREE_OVERFLOW (tem))
12388 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12390 /* Similarly for a NEGATE_EXPR. */
12391 if (TREE_CODE (arg0) == NEGATE_EXPR
12392 && TREE_CODE (arg1) == INTEGER_CST
12393 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12395 && TREE_CODE (tem) == INTEGER_CST
12396 && !TREE_OVERFLOW (tem))
12397 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12399 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12400 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12401 && TREE_CODE (arg1) == INTEGER_CST
12402 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12403 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12404 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12405 fold_convert_loc (loc,
12408 TREE_OPERAND (arg0, 1)));
12410 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12411 if ((TREE_CODE (arg0) == PLUS_EXPR
12412 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12413 || TREE_CODE (arg0) == MINUS_EXPR)
12414 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12417 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12418 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12420 tree val = TREE_OPERAND (arg0, 1);
12421 return omit_two_operands_loc (loc, type,
12422 fold_build2_loc (loc, code, type,
12424 build_int_cst (TREE_TYPE (val),
12426 TREE_OPERAND (arg0, 0), arg1);
12429 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12430 if (TREE_CODE (arg0) == MINUS_EXPR
12431 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12432 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12435 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12437 return omit_two_operands_loc (loc, type,
12439 ? boolean_true_node : boolean_false_node,
12440 TREE_OPERAND (arg0, 1), arg1);
12443 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12444 for !=. Don't do this for ordered comparisons due to overflow. */
12445 if (TREE_CODE (arg0) == MINUS_EXPR
12446 && integer_zerop (arg1))
12447 return fold_build2_loc (loc, code, type,
12448 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12450 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12451 if (TREE_CODE (arg0) == ABS_EXPR
12452 && (integer_zerop (arg1) || real_zerop (arg1)))
12453 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12455 /* If this is an EQ or NE comparison with zero and ARG0 is
12456 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12457 two operations, but the latter can be done in one less insn
12458 on machines that have only two-operand insns or on which a
12459 constant cannot be the first operand. */
12460 if (TREE_CODE (arg0) == BIT_AND_EXPR
12461 && integer_zerop (arg1))
12463 tree arg00 = TREE_OPERAND (arg0, 0);
12464 tree arg01 = TREE_OPERAND (arg0, 1);
12465 if (TREE_CODE (arg00) == LSHIFT_EXPR
12466 && integer_onep (TREE_OPERAND (arg00, 0)))
12468 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12469 arg01, TREE_OPERAND (arg00, 1));
12470 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12471 build_int_cst (TREE_TYPE (arg0), 1));
12472 return fold_build2_loc (loc, code, type,
12473 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12476 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12477 && integer_onep (TREE_OPERAND (arg01, 0)))
12479 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12480 arg00, TREE_OPERAND (arg01, 1));
12481 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12482 build_int_cst (TREE_TYPE (arg0), 1));
12483 return fold_build2_loc (loc, code, type,
12484 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12489 /* If this is an NE or EQ comparison of zero against the result of a
12490 signed MOD operation whose second operand is a power of 2, make
12491 the MOD operation unsigned since it is simpler and equivalent. */
12492 if (integer_zerop (arg1)
12493 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12494 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12495 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12496 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12497 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12498 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12500 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12501 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12502 fold_convert_loc (loc, newtype,
12503 TREE_OPERAND (arg0, 0)),
12504 fold_convert_loc (loc, newtype,
12505 TREE_OPERAND (arg0, 1)));
12507 return fold_build2_loc (loc, code, type, newmod,
12508 fold_convert_loc (loc, newtype, arg1));
12511 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12512 C1 is a valid shift constant, and C2 is a power of two, i.e.
12514 if (TREE_CODE (arg0) == BIT_AND_EXPR
12515 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12516 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12518 && integer_pow2p (TREE_OPERAND (arg0, 1))
12519 && integer_zerop (arg1))
12521 tree itype = TREE_TYPE (arg0);
12522 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12523 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12525 /* Check for a valid shift count. */
12526 if (TREE_INT_CST_HIGH (arg001) == 0
12527 && TREE_INT_CST_LOW (arg001) < prec)
12529 tree arg01 = TREE_OPERAND (arg0, 1);
12530 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12531 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12532 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12533 can be rewritten as (X & (C2 << C1)) != 0. */
12534 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12536 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12537 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12538 return fold_build2_loc (loc, code, type, tem,
12539 fold_convert_loc (loc, itype, arg1));
12541 /* Otherwise, for signed (arithmetic) shifts,
12542 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12543 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12544 else if (!TYPE_UNSIGNED (itype))
12545 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12546 arg000, build_int_cst (itype, 0));
12547 /* Otherwise, of unsigned (logical) shifts,
12548 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12549 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12551 return omit_one_operand_loc (loc, type,
12552 code == EQ_EXPR ? integer_one_node
12553 : integer_zero_node,
12558 /* If we have (A & C) == C where C is a power of 2, convert this into
12559 (A & C) != 0. Similarly for NE_EXPR. */
12560 if (TREE_CODE (arg0) == BIT_AND_EXPR
12561 && integer_pow2p (TREE_OPERAND (arg0, 1))
12562 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12563 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12564 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12565 integer_zero_node));
12567 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12568 bit, then fold the expression into A < 0 or A >= 0. */
12569 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12573 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12574 Similarly for NE_EXPR. */
12575 if (TREE_CODE (arg0) == BIT_AND_EXPR
12576 && TREE_CODE (arg1) == INTEGER_CST
12577 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12579 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12580 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12581 TREE_OPERAND (arg0, 1));
12583 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12584 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12586 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12587 if (integer_nonzerop (dandnotc))
12588 return omit_one_operand_loc (loc, type, rslt, arg0);
12591 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12592 Similarly for NE_EXPR. */
12593 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12594 && TREE_CODE (arg1) == INTEGER_CST
12595 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12597 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12599 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12600 TREE_OPERAND (arg0, 1),
12601 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12602 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12603 if (integer_nonzerop (candnotd))
12604 return omit_one_operand_loc (loc, type, rslt, arg0);
12607 /* If this is a comparison of a field, we may be able to simplify it. */
12608 if ((TREE_CODE (arg0) == COMPONENT_REF
12609 || TREE_CODE (arg0) == BIT_FIELD_REF)
12610 /* Handle the constant case even without -O
12611 to make sure the warnings are given. */
12612 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12614 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12619 /* Optimize comparisons of strlen vs zero to a compare of the
12620 first character of the string vs zero. To wit,
12621 strlen(ptr) == 0 => *ptr == 0
12622 strlen(ptr) != 0 => *ptr != 0
12623 Other cases should reduce to one of these two (or a constant)
12624 due to the return value of strlen being unsigned. */
12625 if (TREE_CODE (arg0) == CALL_EXPR
12626 && integer_zerop (arg1))
12628 tree fndecl = get_callee_fndecl (arg0);
12631 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12632 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12633 && call_expr_nargs (arg0) == 1
12634 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12636 tree iref = build_fold_indirect_ref_loc (loc,
12637 CALL_EXPR_ARG (arg0, 0));
12638 return fold_build2_loc (loc, code, type, iref,
12639 build_int_cst (TREE_TYPE (iref), 0));
12643 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12644 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12645 if (TREE_CODE (arg0) == RSHIFT_EXPR
12646 && integer_zerop (arg1)
12647 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12649 tree arg00 = TREE_OPERAND (arg0, 0);
12650 tree arg01 = TREE_OPERAND (arg0, 1);
12651 tree itype = TREE_TYPE (arg00);
12652 if (TREE_INT_CST_HIGH (arg01) == 0
12653 && TREE_INT_CST_LOW (arg01)
12654 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12656 if (TYPE_UNSIGNED (itype))
12658 itype = signed_type_for (itype);
12659 arg00 = fold_convert_loc (loc, itype, arg00);
12661 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12662 type, arg00, build_int_cst (itype, 0));
12666 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12667 if (integer_zerop (arg1)
12668 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12669 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12670 TREE_OPERAND (arg0, 1));
12672 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12673 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12674 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12675 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12676 build_int_cst (TREE_TYPE (arg0), 0));
12677 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12678 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12679 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12680 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12681 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12682 build_int_cst (TREE_TYPE (arg0), 0));
12684 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12685 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12686 && TREE_CODE (arg1) == INTEGER_CST
12687 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12688 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12689 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12690 TREE_OPERAND (arg0, 1), arg1));
12692 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12693 (X & C) == 0 when C is a single bit. */
12694 if (TREE_CODE (arg0) == BIT_AND_EXPR
12695 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12696 && integer_zerop (arg1)
12697 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12699 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12700 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12701 TREE_OPERAND (arg0, 1));
12702 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12704 fold_convert_loc (loc, TREE_TYPE (arg0),
12708 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12709 constant C is a power of two, i.e. a single bit. */
12710 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12711 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12712 && integer_zerop (arg1)
12713 && integer_pow2p (TREE_OPERAND (arg0, 1))
12714 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12715 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12717 tree arg00 = TREE_OPERAND (arg0, 0);
12718 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12719 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12722 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12723 when is C is a power of two, i.e. a single bit. */
12724 if (TREE_CODE (arg0) == BIT_AND_EXPR
12725 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12726 && integer_zerop (arg1)
12727 && integer_pow2p (TREE_OPERAND (arg0, 1))
12728 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12729 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12731 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12732 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12733 arg000, TREE_OPERAND (arg0, 1));
12734 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12735 tem, build_int_cst (TREE_TYPE (tem), 0));
12738 if (integer_zerop (arg1)
12739 && tree_expr_nonzero_p (arg0))
12741 tree res = constant_boolean_node (code==NE_EXPR, type);
12742 return omit_one_operand_loc (loc, type, res, arg0);
12745 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12746 if (TREE_CODE (arg0) == NEGATE_EXPR
12747 && TREE_CODE (arg1) == NEGATE_EXPR)
12748 return fold_build2_loc (loc, code, type,
12749 TREE_OPERAND (arg0, 0),
12750 fold_convert_loc (loc, TREE_TYPE (arg0),
12751 TREE_OPERAND (arg1, 0)));
12753 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12754 if (TREE_CODE (arg0) == BIT_AND_EXPR
12755 && TREE_CODE (arg1) == BIT_AND_EXPR)
12757 tree arg00 = TREE_OPERAND (arg0, 0);
12758 tree arg01 = TREE_OPERAND (arg0, 1);
12759 tree arg10 = TREE_OPERAND (arg1, 0);
12760 tree arg11 = TREE_OPERAND (arg1, 1);
12761 tree itype = TREE_TYPE (arg0);
12763 if (operand_equal_p (arg01, arg11, 0))
12764 return fold_build2_loc (loc, code, type,
12765 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12766 fold_build2_loc (loc,
12767 BIT_XOR_EXPR, itype,
12770 build_int_cst (itype, 0));
12772 if (operand_equal_p (arg01, arg10, 0))
12773 return fold_build2_loc (loc, code, type,
12774 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12775 fold_build2_loc (loc,
12776 BIT_XOR_EXPR, itype,
12779 build_int_cst (itype, 0));
12781 if (operand_equal_p (arg00, arg11, 0))
12782 return fold_build2_loc (loc, code, type,
12783 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12784 fold_build2_loc (loc,
12785 BIT_XOR_EXPR, itype,
12788 build_int_cst (itype, 0));
12790 if (operand_equal_p (arg00, arg10, 0))
12791 return fold_build2_loc (loc, code, type,
12792 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12793 fold_build2_loc (loc,
12794 BIT_XOR_EXPR, itype,
12797 build_int_cst (itype, 0));
12800 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12801 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12803 tree arg00 = TREE_OPERAND (arg0, 0);
12804 tree arg01 = TREE_OPERAND (arg0, 1);
12805 tree arg10 = TREE_OPERAND (arg1, 0);
12806 tree arg11 = TREE_OPERAND (arg1, 1);
12807 tree itype = TREE_TYPE (arg0);
12809 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12810 operand_equal_p guarantees no side-effects so we don't need
12811 to use omit_one_operand on Z. */
12812 if (operand_equal_p (arg01, arg11, 0))
12813 return fold_build2_loc (loc, code, type, arg00,
12814 fold_convert_loc (loc, TREE_TYPE (arg00),
12816 if (operand_equal_p (arg01, arg10, 0))
12817 return fold_build2_loc (loc, code, type, arg00,
12818 fold_convert_loc (loc, TREE_TYPE (arg00),
12820 if (operand_equal_p (arg00, arg11, 0))
12821 return fold_build2_loc (loc, code, type, arg01,
12822 fold_convert_loc (loc, TREE_TYPE (arg01),
12824 if (operand_equal_p (arg00, arg10, 0))
12825 return fold_build2_loc (loc, code, type, arg01,
12826 fold_convert_loc (loc, TREE_TYPE (arg01),
12829 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12830 if (TREE_CODE (arg01) == INTEGER_CST
12831 && TREE_CODE (arg11) == INTEGER_CST)
12833 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12834 fold_convert_loc (loc, itype, arg11));
12835 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12836 return fold_build2_loc (loc, code, type, tem,
12837 fold_convert_loc (loc, itype, arg10));
12841 /* Attempt to simplify equality/inequality comparisons of complex
12842 values. Only lower the comparison if the result is known or
12843 can be simplified to a single scalar comparison. */
12844 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12845 || TREE_CODE (arg0) == COMPLEX_CST)
12846 && (TREE_CODE (arg1) == COMPLEX_EXPR
12847 || TREE_CODE (arg1) == COMPLEX_CST))
12849 tree real0, imag0, real1, imag1;
12852 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12854 real0 = TREE_OPERAND (arg0, 0);
12855 imag0 = TREE_OPERAND (arg0, 1);
12859 real0 = TREE_REALPART (arg0);
12860 imag0 = TREE_IMAGPART (arg0);
12863 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12865 real1 = TREE_OPERAND (arg1, 0);
12866 imag1 = TREE_OPERAND (arg1, 1);
12870 real1 = TREE_REALPART (arg1);
12871 imag1 = TREE_IMAGPART (arg1);
12874 rcond = fold_binary_loc (loc, code, type, real0, real1);
12875 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12877 if (integer_zerop (rcond))
12879 if (code == EQ_EXPR)
12880 return omit_two_operands_loc (loc, type, boolean_false_node,
12882 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12886 if (code == NE_EXPR)
12887 return omit_two_operands_loc (loc, type, boolean_true_node,
12889 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12893 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12894 if (icond && TREE_CODE (icond) == INTEGER_CST)
12896 if (integer_zerop (icond))
12898 if (code == EQ_EXPR)
12899 return omit_two_operands_loc (loc, type, boolean_false_node,
12901 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12905 if (code == NE_EXPR)
12906 return omit_two_operands_loc (loc, type, boolean_true_node,
12908 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12919 tem = fold_comparison (loc, code, type, op0, op1);
12920 if (tem != NULL_TREE)
12923 /* Transform comparisons of the form X +- C CMP X. */
12924 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12925 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12926 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12927 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12928 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12929 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12931 tree arg01 = TREE_OPERAND (arg0, 1);
12932 enum tree_code code0 = TREE_CODE (arg0);
12935 if (TREE_CODE (arg01) == REAL_CST)
12936 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12938 is_positive = tree_int_cst_sgn (arg01);
12940 /* (X - c) > X becomes false. */
12941 if (code == GT_EXPR
12942 && ((code0 == MINUS_EXPR && is_positive >= 0)
12943 || (code0 == PLUS_EXPR && is_positive <= 0)))
12945 if (TREE_CODE (arg01) == INTEGER_CST
12946 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12947 fold_overflow_warning (("assuming signed overflow does not "
12948 "occur when assuming that (X - c) > X "
12949 "is always false"),
12950 WARN_STRICT_OVERFLOW_ALL);
12951 return constant_boolean_node (0, type);
12954 /* Likewise (X + c) < X becomes false. */
12955 if (code == LT_EXPR
12956 && ((code0 == PLUS_EXPR && is_positive >= 0)
12957 || (code0 == MINUS_EXPR && is_positive <= 0)))
12959 if (TREE_CODE (arg01) == INTEGER_CST
12960 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12961 fold_overflow_warning (("assuming signed overflow does not "
12962 "occur when assuming that "
12963 "(X + c) < X is always false"),
12964 WARN_STRICT_OVERFLOW_ALL);
12965 return constant_boolean_node (0, type);
12968 /* Convert (X - c) <= X to true. */
12969 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12971 && ((code0 == MINUS_EXPR && is_positive >= 0)
12972 || (code0 == PLUS_EXPR && is_positive <= 0)))
12974 if (TREE_CODE (arg01) == INTEGER_CST
12975 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12976 fold_overflow_warning (("assuming signed overflow does not "
12977 "occur when assuming that "
12978 "(X - c) <= X is always true"),
12979 WARN_STRICT_OVERFLOW_ALL);
12980 return constant_boolean_node (1, type);
12983 /* Convert (X + c) >= X to true. */
12984 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12986 && ((code0 == PLUS_EXPR && is_positive >= 0)
12987 || (code0 == MINUS_EXPR && is_positive <= 0)))
12989 if (TREE_CODE (arg01) == INTEGER_CST
12990 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12991 fold_overflow_warning (("assuming signed overflow does not "
12992 "occur when assuming that "
12993 "(X + c) >= X is always true"),
12994 WARN_STRICT_OVERFLOW_ALL);
12995 return constant_boolean_node (1, type);
12998 if (TREE_CODE (arg01) == INTEGER_CST)
13000 /* Convert X + c > X and X - c < X to true for integers. */
13001 if (code == GT_EXPR
13002 && ((code0 == PLUS_EXPR && is_positive > 0)
13003 || (code0 == MINUS_EXPR && is_positive < 0)))
13005 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13006 fold_overflow_warning (("assuming signed overflow does "
13007 "not occur when assuming that "
13008 "(X + c) > X is always true"),
13009 WARN_STRICT_OVERFLOW_ALL);
13010 return constant_boolean_node (1, type);
13013 if (code == LT_EXPR
13014 && ((code0 == MINUS_EXPR && is_positive > 0)
13015 || (code0 == PLUS_EXPR && is_positive < 0)))
13017 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13018 fold_overflow_warning (("assuming signed overflow does "
13019 "not occur when assuming that "
13020 "(X - c) < X is always true"),
13021 WARN_STRICT_OVERFLOW_ALL);
13022 return constant_boolean_node (1, type);
13025 /* Convert X + c <= X and X - c >= X to false for integers. */
13026 if (code == LE_EXPR
13027 && ((code0 == PLUS_EXPR && is_positive > 0)
13028 || (code0 == MINUS_EXPR && is_positive < 0)))
13030 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13031 fold_overflow_warning (("assuming signed overflow does "
13032 "not occur when assuming that "
13033 "(X + c) <= X is always false"),
13034 WARN_STRICT_OVERFLOW_ALL);
13035 return constant_boolean_node (0, type);
13038 if (code == GE_EXPR
13039 && ((code0 == MINUS_EXPR && is_positive > 0)
13040 || (code0 == PLUS_EXPR && is_positive < 0)))
13042 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13043 fold_overflow_warning (("assuming signed overflow does "
13044 "not occur when assuming that "
13045 "(X - c) >= X is always false"),
13046 WARN_STRICT_OVERFLOW_ALL);
13047 return constant_boolean_node (0, type);
13052 /* Comparisons with the highest or lowest possible integer of
13053 the specified precision will have known values. */
13055 tree arg1_type = TREE_TYPE (arg1);
13056 unsigned int width = TYPE_PRECISION (arg1_type);
13058 if (TREE_CODE (arg1) == INTEGER_CST
13059 && width <= 2 * HOST_BITS_PER_WIDE_INT
13060 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13062 HOST_WIDE_INT signed_max_hi;
13063 unsigned HOST_WIDE_INT signed_max_lo;
13064 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13066 if (width <= HOST_BITS_PER_WIDE_INT)
13068 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13073 if (TYPE_UNSIGNED (arg1_type))
13075 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13081 max_lo = signed_max_lo;
13082 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13088 width -= HOST_BITS_PER_WIDE_INT;
13089 signed_max_lo = -1;
13090 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13095 if (TYPE_UNSIGNED (arg1_type))
13097 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13102 max_hi = signed_max_hi;
13103 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13107 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13108 && TREE_INT_CST_LOW (arg1) == max_lo)
13112 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13115 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13118 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13121 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13123 /* The GE_EXPR and LT_EXPR cases above are not normally
13124 reached because of previous transformations. */
13129 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13131 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13135 arg1 = const_binop (PLUS_EXPR, arg1,
13136 build_int_cst (TREE_TYPE (arg1), 1));
13137 return fold_build2_loc (loc, EQ_EXPR, type,
13138 fold_convert_loc (loc,
13139 TREE_TYPE (arg1), arg0),
13142 arg1 = const_binop (PLUS_EXPR, arg1,
13143 build_int_cst (TREE_TYPE (arg1), 1));
13144 return fold_build2_loc (loc, NE_EXPR, type,
13145 fold_convert_loc (loc, TREE_TYPE (arg1),
13151 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13153 && TREE_INT_CST_LOW (arg1) == min_lo)
13157 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13160 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13163 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13166 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13171 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13173 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13177 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13178 return fold_build2_loc (loc, NE_EXPR, type,
13179 fold_convert_loc (loc,
13180 TREE_TYPE (arg1), arg0),
13183 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13184 return fold_build2_loc (loc, EQ_EXPR, type,
13185 fold_convert_loc (loc, TREE_TYPE (arg1),
13192 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13193 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13194 && TYPE_UNSIGNED (arg1_type)
13195 /* We will flip the signedness of the comparison operator
13196 associated with the mode of arg1, so the sign bit is
13197 specified by this mode. Check that arg1 is the signed
13198 max associated with this sign bit. */
13199 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13200 /* signed_type does not work on pointer types. */
13201 && INTEGRAL_TYPE_P (arg1_type))
13203 /* The following case also applies to X < signed_max+1
13204 and X >= signed_max+1 because previous transformations. */
13205 if (code == LE_EXPR || code == GT_EXPR)
13208 st = signed_type_for (TREE_TYPE (arg1));
13209 return fold_build2_loc (loc,
13210 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13211 type, fold_convert_loc (loc, st, arg0),
13212 build_int_cst (st, 0));
13218 /* If we are comparing an ABS_EXPR with a constant, we can
13219 convert all the cases into explicit comparisons, but they may
13220 well not be faster than doing the ABS and one comparison.
13221 But ABS (X) <= C is a range comparison, which becomes a subtraction
13222 and a comparison, and is probably faster. */
13223 if (code == LE_EXPR
13224 && TREE_CODE (arg1) == INTEGER_CST
13225 && TREE_CODE (arg0) == ABS_EXPR
13226 && ! TREE_SIDE_EFFECTS (arg0)
13227 && (0 != (tem = negate_expr (arg1)))
13228 && TREE_CODE (tem) == INTEGER_CST
13229 && !TREE_OVERFLOW (tem))
13230 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13231 build2 (GE_EXPR, type,
13232 TREE_OPERAND (arg0, 0), tem),
13233 build2 (LE_EXPR, type,
13234 TREE_OPERAND (arg0, 0), arg1));
13236 /* Convert ABS_EXPR<x> >= 0 to true. */
13237 strict_overflow_p = false;
13238 if (code == GE_EXPR
13239 && (integer_zerop (arg1)
13240 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13241 && real_zerop (arg1)))
13242 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13244 if (strict_overflow_p)
13245 fold_overflow_warning (("assuming signed overflow does not occur "
13246 "when simplifying comparison of "
13247 "absolute value and zero"),
13248 WARN_STRICT_OVERFLOW_CONDITIONAL);
13249 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13252 /* Convert ABS_EXPR<x> < 0 to false. */
13253 strict_overflow_p = false;
13254 if (code == LT_EXPR
13255 && (integer_zerop (arg1) || real_zerop (arg1))
13256 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13258 if (strict_overflow_p)
13259 fold_overflow_warning (("assuming signed overflow does not occur "
13260 "when simplifying comparison of "
13261 "absolute value and zero"),
13262 WARN_STRICT_OVERFLOW_CONDITIONAL);
13263 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13266 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13267 and similarly for >= into !=. */
13268 if ((code == LT_EXPR || code == GE_EXPR)
13269 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13270 && TREE_CODE (arg1) == LSHIFT_EXPR
13271 && integer_onep (TREE_OPERAND (arg1, 0)))
13272 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13273 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13274 TREE_OPERAND (arg1, 1)),
13275 build_int_cst (TREE_TYPE (arg0), 0));
13277 if ((code == LT_EXPR || code == GE_EXPR)
13278 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13279 && CONVERT_EXPR_P (arg1)
13280 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13281 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13283 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13284 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13285 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13286 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13287 build_int_cst (TREE_TYPE (arg0), 0));
13292 case UNORDERED_EXPR:
13300 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13302 t1 = fold_relational_const (code, type, arg0, arg1);
13303 if (t1 != NULL_TREE)
13307 /* If the first operand is NaN, the result is constant. */
13308 if (TREE_CODE (arg0) == REAL_CST
13309 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13310 && (code != LTGT_EXPR || ! flag_trapping_math))
13312 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13313 ? integer_zero_node
13314 : integer_one_node;
13315 return omit_one_operand_loc (loc, type, t1, arg1);
13318 /* If the second operand is NaN, the result is constant. */
13319 if (TREE_CODE (arg1) == REAL_CST
13320 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13321 && (code != LTGT_EXPR || ! flag_trapping_math))
13323 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13324 ? integer_zero_node
13325 : integer_one_node;
13326 return omit_one_operand_loc (loc, type, t1, arg0);
13329 /* Simplify unordered comparison of something with itself. */
13330 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13331 && operand_equal_p (arg0, arg1, 0))
13332 return constant_boolean_node (1, type);
13334 if (code == LTGT_EXPR
13335 && !flag_trapping_math
13336 && operand_equal_p (arg0, arg1, 0))
13337 return constant_boolean_node (0, type);
13339 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13341 tree targ0 = strip_float_extensions (arg0);
13342 tree targ1 = strip_float_extensions (arg1);
13343 tree newtype = TREE_TYPE (targ0);
13345 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13346 newtype = TREE_TYPE (targ1);
13348 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13349 return fold_build2_loc (loc, code, type,
13350 fold_convert_loc (loc, newtype, targ0),
13351 fold_convert_loc (loc, newtype, targ1));
13356 case COMPOUND_EXPR:
13357 /* When pedantic, a compound expression can be neither an lvalue
13358 nor an integer constant expression. */
13359 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13361 /* Don't let (0, 0) be null pointer constant. */
13362 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13363 : fold_convert_loc (loc, type, arg1);
13364 return pedantic_non_lvalue_loc (loc, tem);
13367 if ((TREE_CODE (arg0) == REAL_CST
13368 && TREE_CODE (arg1) == REAL_CST)
13369 || (TREE_CODE (arg0) == INTEGER_CST
13370 && TREE_CODE (arg1) == INTEGER_CST))
13371 return build_complex (type, arg0, arg1);
13372 if (TREE_CODE (arg0) == REALPART_EXPR
13373 && TREE_CODE (arg1) == IMAGPART_EXPR
13374 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13375 && operand_equal_p (TREE_OPERAND (arg0, 0),
13376 TREE_OPERAND (arg1, 0), 0))
13377 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13378 TREE_OPERAND (arg1, 0));
13382 /* An ASSERT_EXPR should never be passed to fold_binary. */
13383 gcc_unreachable ();
13387 } /* switch (code) */
13390 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13391 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13395 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13397 switch (TREE_CODE (*tp))
13403 *walk_subtrees = 0;
13405 /* ... fall through ... */
13412 /* Return whether the sub-tree ST contains a label which is accessible from
13413 outside the sub-tree. */
13416 contains_label_p (tree st)
13419 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13422 /* Fold a ternary expression of code CODE and type TYPE with operands
13423 OP0, OP1, and OP2. Return the folded expression if folding is
13424 successful. Otherwise, return NULL_TREE. */
13427 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13428 tree op0, tree op1, tree op2)
13431 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13432 enum tree_code_class kind = TREE_CODE_CLASS (code);
13434 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13435 && TREE_CODE_LENGTH (code) == 3);
13437 /* Strip any conversions that don't change the mode. This is safe
13438 for every expression, except for a comparison expression because
13439 its signedness is derived from its operands. So, in the latter
13440 case, only strip conversions that don't change the signedness.
13442 Note that this is done as an internal manipulation within the
13443 constant folder, in order to find the simplest representation of
13444 the arguments so that their form can be studied. In any cases,
13445 the appropriate type conversions should be put back in the tree
13446 that will get out of the constant folder. */
13467 case COMPONENT_REF:
13468 if (TREE_CODE (arg0) == CONSTRUCTOR
13469 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13471 unsigned HOST_WIDE_INT idx;
13473 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13480 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13481 so all simple results must be passed through pedantic_non_lvalue. */
13482 if (TREE_CODE (arg0) == INTEGER_CST)
13484 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13485 tem = integer_zerop (arg0) ? op2 : op1;
13486 /* Only optimize constant conditions when the selected branch
13487 has the same type as the COND_EXPR. This avoids optimizing
13488 away "c ? x : throw", where the throw has a void type.
13489 Avoid throwing away that operand which contains label. */
13490 if ((!TREE_SIDE_EFFECTS (unused_op)
13491 || !contains_label_p (unused_op))
13492 && (! VOID_TYPE_P (TREE_TYPE (tem))
13493 || VOID_TYPE_P (type)))
13494 return pedantic_non_lvalue_loc (loc, tem);
13497 if (operand_equal_p (arg1, op2, 0))
13498 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13500 /* If we have A op B ? A : C, we may be able to convert this to a
13501 simpler expression, depending on the operation and the values
13502 of B and C. Signed zeros prevent all of these transformations,
13503 for reasons given above each one.
13505 Also try swapping the arguments and inverting the conditional. */
13506 if (COMPARISON_CLASS_P (arg0)
13507 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13508 arg1, TREE_OPERAND (arg0, 1))
13509 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13511 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13516 if (COMPARISON_CLASS_P (arg0)
13517 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13519 TREE_OPERAND (arg0, 1))
13520 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13522 location_t loc0 = expr_location_or (arg0, loc);
13523 tem = fold_truth_not_expr (loc0, arg0);
13524 if (tem && COMPARISON_CLASS_P (tem))
13526 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13532 /* If the second operand is simpler than the third, swap them
13533 since that produces better jump optimization results. */
13534 if (truth_value_p (TREE_CODE (arg0))
13535 && tree_swap_operands_p (op1, op2, false))
13537 location_t loc0 = expr_location_or (arg0, loc);
13538 /* See if this can be inverted. If it can't, possibly because
13539 it was a floating-point inequality comparison, don't do
13541 tem = fold_truth_not_expr (loc0, arg0);
13543 return fold_build3_loc (loc, code, type, tem, op2, op1);
13546 /* Convert A ? 1 : 0 to simply A. */
13547 if (integer_onep (op1)
13548 && integer_zerop (op2)
13549 /* If we try to convert OP0 to our type, the
13550 call to fold will try to move the conversion inside
13551 a COND, which will recurse. In that case, the COND_EXPR
13552 is probably the best choice, so leave it alone. */
13553 && type == TREE_TYPE (arg0))
13554 return pedantic_non_lvalue_loc (loc, arg0);
13556 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13557 over COND_EXPR in cases such as floating point comparisons. */
13558 if (integer_zerop (op1)
13559 && integer_onep (op2)
13560 && truth_value_p (TREE_CODE (arg0)))
13561 return pedantic_non_lvalue_loc (loc,
13562 fold_convert_loc (loc, type,
13563 invert_truthvalue_loc (loc,
13566 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13567 if (TREE_CODE (arg0) == LT_EXPR
13568 && integer_zerop (TREE_OPERAND (arg0, 1))
13569 && integer_zerop (op2)
13570 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13572 /* sign_bit_p only checks ARG1 bits within A's precision.
13573 If <sign bit of A> has wider type than A, bits outside
13574 of A's precision in <sign bit of A> need to be checked.
13575 If they are all 0, this optimization needs to be done
13576 in unsigned A's type, if they are all 1 in signed A's type,
13577 otherwise this can't be done. */
13578 if (TYPE_PRECISION (TREE_TYPE (tem))
13579 < TYPE_PRECISION (TREE_TYPE (arg1))
13580 && TYPE_PRECISION (TREE_TYPE (tem))
13581 < TYPE_PRECISION (type))
13583 unsigned HOST_WIDE_INT mask_lo;
13584 HOST_WIDE_INT mask_hi;
13585 int inner_width, outer_width;
13588 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13589 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13590 if (outer_width > TYPE_PRECISION (type))
13591 outer_width = TYPE_PRECISION (type);
13593 if (outer_width > HOST_BITS_PER_WIDE_INT)
13595 mask_hi = ((unsigned HOST_WIDE_INT) -1
13596 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13602 mask_lo = ((unsigned HOST_WIDE_INT) -1
13603 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13605 if (inner_width > HOST_BITS_PER_WIDE_INT)
13607 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13608 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13612 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13613 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13615 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13616 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13618 tem_type = signed_type_for (TREE_TYPE (tem));
13619 tem = fold_convert_loc (loc, tem_type, tem);
13621 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13622 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13624 tem_type = unsigned_type_for (TREE_TYPE (tem));
13625 tem = fold_convert_loc (loc, tem_type, tem);
13633 fold_convert_loc (loc, type,
13634 fold_build2_loc (loc, BIT_AND_EXPR,
13635 TREE_TYPE (tem), tem,
13636 fold_convert_loc (loc,
13641 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13642 already handled above. */
13643 if (TREE_CODE (arg0) == BIT_AND_EXPR
13644 && integer_onep (TREE_OPERAND (arg0, 1))
13645 && integer_zerop (op2)
13646 && integer_pow2p (arg1))
13648 tree tem = TREE_OPERAND (arg0, 0);
13650 if (TREE_CODE (tem) == RSHIFT_EXPR
13651 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13652 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13653 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13654 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13655 TREE_OPERAND (tem, 0), arg1);
13658 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13659 is probably obsolete because the first operand should be a
13660 truth value (that's why we have the two cases above), but let's
13661 leave it in until we can confirm this for all front-ends. */
13662 if (integer_zerop (op2)
13663 && TREE_CODE (arg0) == NE_EXPR
13664 && integer_zerop (TREE_OPERAND (arg0, 1))
13665 && integer_pow2p (arg1)
13666 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13667 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13668 arg1, OEP_ONLY_CONST))
13669 return pedantic_non_lvalue_loc (loc,
13670 fold_convert_loc (loc, type,
13671 TREE_OPERAND (arg0, 0)));
13673 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13674 if (integer_zerop (op2)
13675 && truth_value_p (TREE_CODE (arg0))
13676 && truth_value_p (TREE_CODE (arg1)))
13677 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13678 fold_convert_loc (loc, type, arg0),
13681 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13682 if (integer_onep (op2)
13683 && truth_value_p (TREE_CODE (arg0))
13684 && truth_value_p (TREE_CODE (arg1)))
13686 location_t loc0 = expr_location_or (arg0, loc);
13687 /* Only perform transformation if ARG0 is easily inverted. */
13688 tem = fold_truth_not_expr (loc0, arg0);
13690 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13691 fold_convert_loc (loc, type, tem),
13695 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13696 if (integer_zerop (arg1)
13697 && truth_value_p (TREE_CODE (arg0))
13698 && truth_value_p (TREE_CODE (op2)))
13700 location_t loc0 = expr_location_or (arg0, loc);
13701 /* Only perform transformation if ARG0 is easily inverted. */
13702 tem = fold_truth_not_expr (loc0, arg0);
13704 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13705 fold_convert_loc (loc, type, tem),
13709 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13710 if (integer_onep (arg1)
13711 && truth_value_p (TREE_CODE (arg0))
13712 && truth_value_p (TREE_CODE (op2)))
13713 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13714 fold_convert_loc (loc, type, arg0),
13720 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13721 of fold_ternary on them. */
13722 gcc_unreachable ();
13724 case BIT_FIELD_REF:
13725 if ((TREE_CODE (arg0) == VECTOR_CST
13726 || TREE_CODE (arg0) == CONSTRUCTOR)
13727 && type == TREE_TYPE (TREE_TYPE (arg0)))
13729 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13730 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13733 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13734 && (idx % width) == 0
13735 && (idx = idx / width)
13736 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13738 if (TREE_CODE (arg0) == VECTOR_CST)
13740 tree elements = TREE_VECTOR_CST_ELTS (arg0);
13741 while (idx-- > 0 && elements)
13742 elements = TREE_CHAIN (elements);
13744 return TREE_VALUE (elements);
13746 else if (idx < CONSTRUCTOR_NELTS (arg0))
13747 return CONSTRUCTOR_ELT (arg0, idx)->value;
13748 return build_zero_cst (type);
13752 /* A bit-field-ref that referenced the full argument can be stripped. */
13753 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13754 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13755 && integer_zerop (op2))
13756 return fold_convert_loc (loc, type, arg0);
13761 /* For integers we can decompose the FMA if possible. */
13762 if (TREE_CODE (arg0) == INTEGER_CST
13763 && TREE_CODE (arg1) == INTEGER_CST)
13764 return fold_build2_loc (loc, PLUS_EXPR, type,
13765 const_binop (MULT_EXPR, arg0, arg1), arg2);
13766 if (integer_zerop (arg2))
13767 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13769 return fold_fma (loc, type, arg0, arg1, arg2);
13773 } /* switch (code) */
13776 /* Perform constant folding and related simplification of EXPR.
13777 The related simplifications include x*1 => x, x*0 => 0, etc.,
13778 and application of the associative law.
13779 NOP_EXPR conversions may be removed freely (as long as we
13780 are careful not to change the type of the overall expression).
13781 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13782 but we can constant-fold them if they have constant operands. */
13784 #ifdef ENABLE_FOLD_CHECKING
13785 # define fold(x) fold_1 (x)
13786 static tree fold_1 (tree);
13792 const tree t = expr;
13793 enum tree_code code = TREE_CODE (t);
13794 enum tree_code_class kind = TREE_CODE_CLASS (code);
13796 location_t loc = EXPR_LOCATION (expr);
13798 /* Return right away if a constant. */
13799 if (kind == tcc_constant)
13802 /* CALL_EXPR-like objects with variable numbers of operands are
13803 treated specially. */
13804 if (kind == tcc_vl_exp)
13806 if (code == CALL_EXPR)
13808 tem = fold_call_expr (loc, expr, false);
13809 return tem ? tem : expr;
13814 if (IS_EXPR_CODE_CLASS (kind))
13816 tree type = TREE_TYPE (t);
13817 tree op0, op1, op2;
13819 switch (TREE_CODE_LENGTH (code))
13822 op0 = TREE_OPERAND (t, 0);
13823 tem = fold_unary_loc (loc, code, type, op0);
13824 return tem ? tem : expr;
13826 op0 = TREE_OPERAND (t, 0);
13827 op1 = TREE_OPERAND (t, 1);
13828 tem = fold_binary_loc (loc, code, type, op0, op1);
13829 return tem ? tem : expr;
13831 op0 = TREE_OPERAND (t, 0);
13832 op1 = TREE_OPERAND (t, 1);
13833 op2 = TREE_OPERAND (t, 2);
13834 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13835 return tem ? tem : expr;
13845 tree op0 = TREE_OPERAND (t, 0);
13846 tree op1 = TREE_OPERAND (t, 1);
13848 if (TREE_CODE (op1) == INTEGER_CST
13849 && TREE_CODE (op0) == CONSTRUCTOR
13850 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13852 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13853 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13854 unsigned HOST_WIDE_INT begin = 0;
13856 /* Find a matching index by means of a binary search. */
13857 while (begin != end)
13859 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13860 tree index = VEC_index (constructor_elt, elts, middle)->index;
13862 if (TREE_CODE (index) == INTEGER_CST
13863 && tree_int_cst_lt (index, op1))
13864 begin = middle + 1;
13865 else if (TREE_CODE (index) == INTEGER_CST
13866 && tree_int_cst_lt (op1, index))
13868 else if (TREE_CODE (index) == RANGE_EXPR
13869 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13870 begin = middle + 1;
13871 else if (TREE_CODE (index) == RANGE_EXPR
13872 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13875 return VEC_index (constructor_elt, elts, middle)->value;
13883 return fold (DECL_INITIAL (t));
13887 } /* switch (code) */
13890 #ifdef ENABLE_FOLD_CHECKING
13893 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13894 static void fold_check_failed (const_tree, const_tree);
13895 void print_fold_checksum (const_tree);
13897 /* When --enable-checking=fold, compute a digest of expr before
13898 and after actual fold call to see if fold did not accidentally
13899 change original expr. */
13905 struct md5_ctx ctx;
13906 unsigned char checksum_before[16], checksum_after[16];
13909 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13910 md5_init_ctx (&ctx);
13911 fold_checksum_tree (expr, &ctx, ht);
13912 md5_finish_ctx (&ctx, checksum_before);
13915 ret = fold_1 (expr);
13917 md5_init_ctx (&ctx);
13918 fold_checksum_tree (expr, &ctx, ht);
13919 md5_finish_ctx (&ctx, checksum_after);
13922 if (memcmp (checksum_before, checksum_after, 16))
13923 fold_check_failed (expr, ret);
13929 print_fold_checksum (const_tree expr)
13931 struct md5_ctx ctx;
13932 unsigned char checksum[16], cnt;
13935 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13936 md5_init_ctx (&ctx);
13937 fold_checksum_tree (expr, &ctx, ht);
13938 md5_finish_ctx (&ctx, checksum);
13940 for (cnt = 0; cnt < 16; ++cnt)
13941 fprintf (stderr, "%02x", checksum[cnt]);
13942 putc ('\n', stderr);
13946 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13948 internal_error ("fold check: original tree changed by fold");
13952 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13955 enum tree_code code;
13956 union tree_node buf;
13962 slot = (void **) htab_find_slot (ht, expr, INSERT);
13965 *slot = CONST_CAST_TREE (expr);
13966 code = TREE_CODE (expr);
13967 if (TREE_CODE_CLASS (code) == tcc_declaration
13968 && DECL_ASSEMBLER_NAME_SET_P (expr))
13970 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13971 memcpy ((char *) &buf, expr, tree_size (expr));
13972 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13973 expr = (tree) &buf;
13975 else if (TREE_CODE_CLASS (code) == tcc_type
13976 && (TYPE_POINTER_TO (expr)
13977 || TYPE_REFERENCE_TO (expr)
13978 || TYPE_CACHED_VALUES_P (expr)
13979 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13980 || TYPE_NEXT_VARIANT (expr)))
13982 /* Allow these fields to be modified. */
13984 memcpy ((char *) &buf, expr, tree_size (expr));
13985 expr = tmp = (tree) &buf;
13986 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13987 TYPE_POINTER_TO (tmp) = NULL;
13988 TYPE_REFERENCE_TO (tmp) = NULL;
13989 TYPE_NEXT_VARIANT (tmp) = NULL;
13990 if (TYPE_CACHED_VALUES_P (tmp))
13992 TYPE_CACHED_VALUES_P (tmp) = 0;
13993 TYPE_CACHED_VALUES (tmp) = NULL;
13996 md5_process_bytes (expr, tree_size (expr), ctx);
13997 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13998 if (TREE_CODE_CLASS (code) != tcc_type
13999 && TREE_CODE_CLASS (code) != tcc_declaration
14000 && code != TREE_LIST
14001 && code != SSA_NAME
14002 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14003 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14004 switch (TREE_CODE_CLASS (code))
14010 md5_process_bytes (TREE_STRING_POINTER (expr),
14011 TREE_STRING_LENGTH (expr), ctx);
14014 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14015 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14018 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
14024 case tcc_exceptional:
14028 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14029 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14030 expr = TREE_CHAIN (expr);
14031 goto recursive_label;
14034 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14035 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14041 case tcc_expression:
14042 case tcc_reference:
14043 case tcc_comparison:
14046 case tcc_statement:
14048 len = TREE_OPERAND_LENGTH (expr);
14049 for (i = 0; i < len; ++i)
14050 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14052 case tcc_declaration:
14053 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14054 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14055 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14057 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14058 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14059 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14060 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14061 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14063 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14064 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14066 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14068 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14069 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14070 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14074 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14075 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14076 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14077 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14078 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14079 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14080 if (INTEGRAL_TYPE_P (expr)
14081 || SCALAR_FLOAT_TYPE_P (expr))
14083 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14084 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14086 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14087 if (TREE_CODE (expr) == RECORD_TYPE
14088 || TREE_CODE (expr) == UNION_TYPE
14089 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14090 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14091 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14098 /* Helper function for outputting the checksum of a tree T. When
14099 debugging with gdb, you can "define mynext" to be "next" followed
14100 by "call debug_fold_checksum (op0)", then just trace down till the
14103 DEBUG_FUNCTION void
14104 debug_fold_checksum (const_tree t)
14107 unsigned char checksum[16];
14108 struct md5_ctx ctx;
14109 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14111 md5_init_ctx (&ctx);
14112 fold_checksum_tree (t, &ctx, ht);
14113 md5_finish_ctx (&ctx, checksum);
14116 for (i = 0; i < 16; i++)
14117 fprintf (stderr, "%d ", checksum[i]);
14119 fprintf (stderr, "\n");
14124 /* Fold a unary tree expression with code CODE of type TYPE with an
14125 operand OP0. LOC is the location of the resulting expression.
14126 Return a folded expression if successful. Otherwise, return a tree
14127 expression with code CODE of type TYPE with an operand OP0. */
14130 fold_build1_stat_loc (location_t loc,
14131 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14134 #ifdef ENABLE_FOLD_CHECKING
14135 unsigned char checksum_before[16], checksum_after[16];
14136 struct md5_ctx ctx;
14139 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14140 md5_init_ctx (&ctx);
14141 fold_checksum_tree (op0, &ctx, ht);
14142 md5_finish_ctx (&ctx, checksum_before);
14146 tem = fold_unary_loc (loc, code, type, op0);
14148 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14150 #ifdef ENABLE_FOLD_CHECKING
14151 md5_init_ctx (&ctx);
14152 fold_checksum_tree (op0, &ctx, ht);
14153 md5_finish_ctx (&ctx, checksum_after);
14156 if (memcmp (checksum_before, checksum_after, 16))
14157 fold_check_failed (op0, tem);
14162 /* Fold a binary tree expression with code CODE of type TYPE with
14163 operands OP0 and OP1. LOC is the location of the resulting
14164 expression. Return a folded expression if successful. Otherwise,
14165 return a tree expression with code CODE of type TYPE with operands
14169 fold_build2_stat_loc (location_t loc,
14170 enum tree_code code, tree type, tree op0, tree op1
14174 #ifdef ENABLE_FOLD_CHECKING
14175 unsigned char checksum_before_op0[16],
14176 checksum_before_op1[16],
14177 checksum_after_op0[16],
14178 checksum_after_op1[16];
14179 struct md5_ctx ctx;
14182 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14183 md5_init_ctx (&ctx);
14184 fold_checksum_tree (op0, &ctx, ht);
14185 md5_finish_ctx (&ctx, checksum_before_op0);
14188 md5_init_ctx (&ctx);
14189 fold_checksum_tree (op1, &ctx, ht);
14190 md5_finish_ctx (&ctx, checksum_before_op1);
14194 tem = fold_binary_loc (loc, code, type, op0, op1);
14196 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14198 #ifdef ENABLE_FOLD_CHECKING
14199 md5_init_ctx (&ctx);
14200 fold_checksum_tree (op0, &ctx, ht);
14201 md5_finish_ctx (&ctx, checksum_after_op0);
14204 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14205 fold_check_failed (op0, tem);
14207 md5_init_ctx (&ctx);
14208 fold_checksum_tree (op1, &ctx, ht);
14209 md5_finish_ctx (&ctx, checksum_after_op1);
14212 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14213 fold_check_failed (op1, tem);
14218 /* Fold a ternary tree expression with code CODE of type TYPE with
14219 operands OP0, OP1, and OP2. Return a folded expression if
14220 successful. Otherwise, return a tree expression with code CODE of
14221 type TYPE with operands OP0, OP1, and OP2. */
14224 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14225 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14228 #ifdef ENABLE_FOLD_CHECKING
14229 unsigned char checksum_before_op0[16],
14230 checksum_before_op1[16],
14231 checksum_before_op2[16],
14232 checksum_after_op0[16],
14233 checksum_after_op1[16],
14234 checksum_after_op2[16];
14235 struct md5_ctx ctx;
14238 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14239 md5_init_ctx (&ctx);
14240 fold_checksum_tree (op0, &ctx, ht);
14241 md5_finish_ctx (&ctx, checksum_before_op0);
14244 md5_init_ctx (&ctx);
14245 fold_checksum_tree (op1, &ctx, ht);
14246 md5_finish_ctx (&ctx, checksum_before_op1);
14249 md5_init_ctx (&ctx);
14250 fold_checksum_tree (op2, &ctx, ht);
14251 md5_finish_ctx (&ctx, checksum_before_op2);
14255 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14256 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14258 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14260 #ifdef ENABLE_FOLD_CHECKING
14261 md5_init_ctx (&ctx);
14262 fold_checksum_tree (op0, &ctx, ht);
14263 md5_finish_ctx (&ctx, checksum_after_op0);
14266 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14267 fold_check_failed (op0, tem);
14269 md5_init_ctx (&ctx);
14270 fold_checksum_tree (op1, &ctx, ht);
14271 md5_finish_ctx (&ctx, checksum_after_op1);
14274 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14275 fold_check_failed (op1, tem);
14277 md5_init_ctx (&ctx);
14278 fold_checksum_tree (op2, &ctx, ht);
14279 md5_finish_ctx (&ctx, checksum_after_op2);
14282 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14283 fold_check_failed (op2, tem);
14288 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14289 arguments in ARGARRAY, and a null static chain.
14290 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14291 of type TYPE from the given operands as constructed by build_call_array. */
14294 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14295 int nargs, tree *argarray)
14298 #ifdef ENABLE_FOLD_CHECKING
14299 unsigned char checksum_before_fn[16],
14300 checksum_before_arglist[16],
14301 checksum_after_fn[16],
14302 checksum_after_arglist[16];
14303 struct md5_ctx ctx;
14307 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14308 md5_init_ctx (&ctx);
14309 fold_checksum_tree (fn, &ctx, ht);
14310 md5_finish_ctx (&ctx, checksum_before_fn);
14313 md5_init_ctx (&ctx);
14314 for (i = 0; i < nargs; i++)
14315 fold_checksum_tree (argarray[i], &ctx, ht);
14316 md5_finish_ctx (&ctx, checksum_before_arglist);
14320 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14322 #ifdef ENABLE_FOLD_CHECKING
14323 md5_init_ctx (&ctx);
14324 fold_checksum_tree (fn, &ctx, ht);
14325 md5_finish_ctx (&ctx, checksum_after_fn);
14328 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14329 fold_check_failed (fn, tem);
14331 md5_init_ctx (&ctx);
14332 for (i = 0; i < nargs; i++)
14333 fold_checksum_tree (argarray[i], &ctx, ht);
14334 md5_finish_ctx (&ctx, checksum_after_arglist);
14337 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14338 fold_check_failed (NULL_TREE, tem);
14343 /* Perform constant folding and related simplification of initializer
14344 expression EXPR. These behave identically to "fold_buildN" but ignore
14345 potential run-time traps and exceptions that fold must preserve. */
14347 #define START_FOLD_INIT \
14348 int saved_signaling_nans = flag_signaling_nans;\
14349 int saved_trapping_math = flag_trapping_math;\
14350 int saved_rounding_math = flag_rounding_math;\
14351 int saved_trapv = flag_trapv;\
14352 int saved_folding_initializer = folding_initializer;\
14353 flag_signaling_nans = 0;\
14354 flag_trapping_math = 0;\
14355 flag_rounding_math = 0;\
14357 folding_initializer = 1;
14359 #define END_FOLD_INIT \
14360 flag_signaling_nans = saved_signaling_nans;\
14361 flag_trapping_math = saved_trapping_math;\
14362 flag_rounding_math = saved_rounding_math;\
14363 flag_trapv = saved_trapv;\
14364 folding_initializer = saved_folding_initializer;
14367 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14368 tree type, tree op)
14373 result = fold_build1_loc (loc, code, type, op);
14380 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14381 tree type, tree op0, tree op1)
14386 result = fold_build2_loc (loc, code, type, op0, op1);
14393 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14394 tree type, tree op0, tree op1, tree op2)
14399 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14406 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14407 int nargs, tree *argarray)
14412 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14418 #undef START_FOLD_INIT
14419 #undef END_FOLD_INIT
14421 /* Determine if first argument is a multiple of second argument. Return 0 if
14422 it is not, or we cannot easily determined it to be.
14424 An example of the sort of thing we care about (at this point; this routine
14425 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14426 fold cases do now) is discovering that
14428 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14434 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14436 This code also handles discovering that
14438 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14440 is a multiple of 8 so we don't have to worry about dealing with a
14441 possible remainder.
14443 Note that we *look* inside a SAVE_EXPR only to determine how it was
14444 calculated; it is not safe for fold to do much of anything else with the
14445 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14446 at run time. For example, the latter example above *cannot* be implemented
14447 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14448 evaluation time of the original SAVE_EXPR is not necessarily the same at
14449 the time the new expression is evaluated. The only optimization of this
14450 sort that would be valid is changing
14452 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14456 SAVE_EXPR (I) * SAVE_EXPR (J)
14458 (where the same SAVE_EXPR (J) is used in the original and the
14459 transformed version). */
14462 multiple_of_p (tree type, const_tree top, const_tree bottom)
14464 if (operand_equal_p (top, bottom, 0))
14467 if (TREE_CODE (type) != INTEGER_TYPE)
14470 switch (TREE_CODE (top))
14473 /* Bitwise and provides a power of two multiple. If the mask is
14474 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14475 if (!integer_pow2p (bottom))
14480 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14481 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14485 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14486 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14489 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14493 op1 = TREE_OPERAND (top, 1);
14494 /* const_binop may not detect overflow correctly,
14495 so check for it explicitly here. */
14496 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14497 > TREE_INT_CST_LOW (op1)
14498 && TREE_INT_CST_HIGH (op1) == 0
14499 && 0 != (t1 = fold_convert (type,
14500 const_binop (LSHIFT_EXPR,
14503 && !TREE_OVERFLOW (t1))
14504 return multiple_of_p (type, t1, bottom);
14509 /* Can't handle conversions from non-integral or wider integral type. */
14510 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14511 || (TYPE_PRECISION (type)
14512 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14515 /* .. fall through ... */
14518 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14521 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14522 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14525 if (TREE_CODE (bottom) != INTEGER_CST
14526 || integer_zerop (bottom)
14527 || (TYPE_UNSIGNED (type)
14528 && (tree_int_cst_sgn (top) < 0
14529 || tree_int_cst_sgn (bottom) < 0)))
14531 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14539 /* Return true if CODE or TYPE is known to be non-negative. */
14542 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14544 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14545 && truth_value_p (code))
14546 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14547 have a signed:1 type (where the value is -1 and 0). */
14552 /* Return true if (CODE OP0) is known to be non-negative. If the return
14553 value is based on the assumption that signed overflow is undefined,
14554 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14555 *STRICT_OVERFLOW_P. */
14558 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14559 bool *strict_overflow_p)
14561 if (TYPE_UNSIGNED (type))
14567 /* We can't return 1 if flag_wrapv is set because
14568 ABS_EXPR<INT_MIN> = INT_MIN. */
14569 if (!INTEGRAL_TYPE_P (type))
14571 if (TYPE_OVERFLOW_UNDEFINED (type))
14573 *strict_overflow_p = true;
14578 case NON_LVALUE_EXPR:
14580 case FIX_TRUNC_EXPR:
14581 return tree_expr_nonnegative_warnv_p (op0,
14582 strict_overflow_p);
14586 tree inner_type = TREE_TYPE (op0);
14587 tree outer_type = type;
14589 if (TREE_CODE (outer_type) == REAL_TYPE)
14591 if (TREE_CODE (inner_type) == REAL_TYPE)
14592 return tree_expr_nonnegative_warnv_p (op0,
14593 strict_overflow_p);
14594 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14596 if (TYPE_UNSIGNED (inner_type))
14598 return tree_expr_nonnegative_warnv_p (op0,
14599 strict_overflow_p);
14602 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14604 if (TREE_CODE (inner_type) == REAL_TYPE)
14605 return tree_expr_nonnegative_warnv_p (op0,
14606 strict_overflow_p);
14607 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14608 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14609 && TYPE_UNSIGNED (inner_type);
14615 return tree_simple_nonnegative_warnv_p (code, type);
14618 /* We don't know sign of `t', so be conservative and return false. */
14622 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14623 value is based on the assumption that signed overflow is undefined,
14624 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14625 *STRICT_OVERFLOW_P. */
14628 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14629 tree op1, bool *strict_overflow_p)
14631 if (TYPE_UNSIGNED (type))
14636 case POINTER_PLUS_EXPR:
14638 if (FLOAT_TYPE_P (type))
14639 return (tree_expr_nonnegative_warnv_p (op0,
14641 && tree_expr_nonnegative_warnv_p (op1,
14642 strict_overflow_p));
14644 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14645 both unsigned and at least 2 bits shorter than the result. */
14646 if (TREE_CODE (type) == INTEGER_TYPE
14647 && TREE_CODE (op0) == NOP_EXPR
14648 && TREE_CODE (op1) == NOP_EXPR)
14650 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14651 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14652 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14653 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14655 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14656 TYPE_PRECISION (inner2)) + 1;
14657 return prec < TYPE_PRECISION (type);
14663 if (FLOAT_TYPE_P (type))
14665 /* x * x for floating point x is always non-negative. */
14666 if (operand_equal_p (op0, op1, 0))
14668 return (tree_expr_nonnegative_warnv_p (op0,
14670 && tree_expr_nonnegative_warnv_p (op1,
14671 strict_overflow_p));
14674 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14675 both unsigned and their total bits is shorter than the result. */
14676 if (TREE_CODE (type) == INTEGER_TYPE
14677 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14678 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14680 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14681 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14683 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14684 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14687 bool unsigned0 = TYPE_UNSIGNED (inner0);
14688 bool unsigned1 = TYPE_UNSIGNED (inner1);
14690 if (TREE_CODE (op0) == INTEGER_CST)
14691 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14693 if (TREE_CODE (op1) == INTEGER_CST)
14694 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14696 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14697 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14699 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14700 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14701 : TYPE_PRECISION (inner0);
14703 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14704 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14705 : TYPE_PRECISION (inner1);
14707 return precision0 + precision1 < TYPE_PRECISION (type);
14714 return (tree_expr_nonnegative_warnv_p (op0,
14716 || tree_expr_nonnegative_warnv_p (op1,
14717 strict_overflow_p));
14723 case TRUNC_DIV_EXPR:
14724 case CEIL_DIV_EXPR:
14725 case FLOOR_DIV_EXPR:
14726 case ROUND_DIV_EXPR:
14727 return (tree_expr_nonnegative_warnv_p (op0,
14729 && tree_expr_nonnegative_warnv_p (op1,
14730 strict_overflow_p));
14732 case TRUNC_MOD_EXPR:
14733 case CEIL_MOD_EXPR:
14734 case FLOOR_MOD_EXPR:
14735 case ROUND_MOD_EXPR:
14736 return tree_expr_nonnegative_warnv_p (op0,
14737 strict_overflow_p);
14739 return tree_simple_nonnegative_warnv_p (code, type);
14742 /* We don't know sign of `t', so be conservative and return false. */
14746 /* Return true if T is known to be non-negative. If the return
14747 value is based on the assumption that signed overflow is undefined,
14748 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14749 *STRICT_OVERFLOW_P. */
14752 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14754 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14757 switch (TREE_CODE (t))
14760 return tree_int_cst_sgn (t) >= 0;
14763 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14766 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14769 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14771 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14772 strict_overflow_p));
14774 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14777 /* We don't know sign of `t', so be conservative and return false. */
14781 /* Return true if T is known to be non-negative. If the return
14782 value is based on the assumption that signed overflow is undefined,
14783 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14784 *STRICT_OVERFLOW_P. */
14787 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14788 tree arg0, tree arg1, bool *strict_overflow_p)
14790 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14791 switch (DECL_FUNCTION_CODE (fndecl))
14793 CASE_FLT_FN (BUILT_IN_ACOS):
14794 CASE_FLT_FN (BUILT_IN_ACOSH):
14795 CASE_FLT_FN (BUILT_IN_CABS):
14796 CASE_FLT_FN (BUILT_IN_COSH):
14797 CASE_FLT_FN (BUILT_IN_ERFC):
14798 CASE_FLT_FN (BUILT_IN_EXP):
14799 CASE_FLT_FN (BUILT_IN_EXP10):
14800 CASE_FLT_FN (BUILT_IN_EXP2):
14801 CASE_FLT_FN (BUILT_IN_FABS):
14802 CASE_FLT_FN (BUILT_IN_FDIM):
14803 CASE_FLT_FN (BUILT_IN_HYPOT):
14804 CASE_FLT_FN (BUILT_IN_POW10):
14805 CASE_INT_FN (BUILT_IN_FFS):
14806 CASE_INT_FN (BUILT_IN_PARITY):
14807 CASE_INT_FN (BUILT_IN_POPCOUNT):
14808 case BUILT_IN_BSWAP32:
14809 case BUILT_IN_BSWAP64:
14813 CASE_FLT_FN (BUILT_IN_SQRT):
14814 /* sqrt(-0.0) is -0.0. */
14815 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14817 return tree_expr_nonnegative_warnv_p (arg0,
14818 strict_overflow_p);
14820 CASE_FLT_FN (BUILT_IN_ASINH):
14821 CASE_FLT_FN (BUILT_IN_ATAN):
14822 CASE_FLT_FN (BUILT_IN_ATANH):
14823 CASE_FLT_FN (BUILT_IN_CBRT):
14824 CASE_FLT_FN (BUILT_IN_CEIL):
14825 CASE_FLT_FN (BUILT_IN_ERF):
14826 CASE_FLT_FN (BUILT_IN_EXPM1):
14827 CASE_FLT_FN (BUILT_IN_FLOOR):
14828 CASE_FLT_FN (BUILT_IN_FMOD):
14829 CASE_FLT_FN (BUILT_IN_FREXP):
14830 CASE_FLT_FN (BUILT_IN_ICEIL):
14831 CASE_FLT_FN (BUILT_IN_IFLOOR):
14832 CASE_FLT_FN (BUILT_IN_IRINT):
14833 CASE_FLT_FN (BUILT_IN_IROUND):
14834 CASE_FLT_FN (BUILT_IN_LCEIL):
14835 CASE_FLT_FN (BUILT_IN_LDEXP):
14836 CASE_FLT_FN (BUILT_IN_LFLOOR):
14837 CASE_FLT_FN (BUILT_IN_LLCEIL):
14838 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14839 CASE_FLT_FN (BUILT_IN_LLRINT):
14840 CASE_FLT_FN (BUILT_IN_LLROUND):
14841 CASE_FLT_FN (BUILT_IN_LRINT):
14842 CASE_FLT_FN (BUILT_IN_LROUND):
14843 CASE_FLT_FN (BUILT_IN_MODF):
14844 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14845 CASE_FLT_FN (BUILT_IN_RINT):
14846 CASE_FLT_FN (BUILT_IN_ROUND):
14847 CASE_FLT_FN (BUILT_IN_SCALB):
14848 CASE_FLT_FN (BUILT_IN_SCALBLN):
14849 CASE_FLT_FN (BUILT_IN_SCALBN):
14850 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14851 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14852 CASE_FLT_FN (BUILT_IN_SINH):
14853 CASE_FLT_FN (BUILT_IN_TANH):
14854 CASE_FLT_FN (BUILT_IN_TRUNC):
14855 /* True if the 1st argument is nonnegative. */
14856 return tree_expr_nonnegative_warnv_p (arg0,
14857 strict_overflow_p);
14859 CASE_FLT_FN (BUILT_IN_FMAX):
14860 /* True if the 1st OR 2nd arguments are nonnegative. */
14861 return (tree_expr_nonnegative_warnv_p (arg0,
14863 || (tree_expr_nonnegative_warnv_p (arg1,
14864 strict_overflow_p)));
14866 CASE_FLT_FN (BUILT_IN_FMIN):
14867 /* True if the 1st AND 2nd arguments are nonnegative. */
14868 return (tree_expr_nonnegative_warnv_p (arg0,
14870 && (tree_expr_nonnegative_warnv_p (arg1,
14871 strict_overflow_p)));
14873 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14874 /* True if the 2nd argument is nonnegative. */
14875 return tree_expr_nonnegative_warnv_p (arg1,
14876 strict_overflow_p);
14878 CASE_FLT_FN (BUILT_IN_POWI):
14879 /* True if the 1st argument is nonnegative or the second
14880 argument is an even integer. */
14881 if (TREE_CODE (arg1) == INTEGER_CST
14882 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14884 return tree_expr_nonnegative_warnv_p (arg0,
14885 strict_overflow_p);
14887 CASE_FLT_FN (BUILT_IN_POW):
14888 /* True if the 1st argument is nonnegative or the second
14889 argument is an even integer valued real. */
14890 if (TREE_CODE (arg1) == REAL_CST)
14895 c = TREE_REAL_CST (arg1);
14896 n = real_to_integer (&c);
14899 REAL_VALUE_TYPE cint;
14900 real_from_integer (&cint, VOIDmode, n,
14901 n < 0 ? -1 : 0, 0);
14902 if (real_identical (&c, &cint))
14906 return tree_expr_nonnegative_warnv_p (arg0,
14907 strict_overflow_p);
14912 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14916 /* Return true if T is known to be non-negative. If the return
14917 value is based on the assumption that signed overflow is undefined,
14918 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14919 *STRICT_OVERFLOW_P. */
14922 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14924 enum tree_code code = TREE_CODE (t);
14925 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14932 tree temp = TARGET_EXPR_SLOT (t);
14933 t = TARGET_EXPR_INITIAL (t);
14935 /* If the initializer is non-void, then it's a normal expression
14936 that will be assigned to the slot. */
14937 if (!VOID_TYPE_P (t))
14938 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14940 /* Otherwise, the initializer sets the slot in some way. One common
14941 way is an assignment statement at the end of the initializer. */
14944 if (TREE_CODE (t) == BIND_EXPR)
14945 t = expr_last (BIND_EXPR_BODY (t));
14946 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14947 || TREE_CODE (t) == TRY_CATCH_EXPR)
14948 t = expr_last (TREE_OPERAND (t, 0));
14949 else if (TREE_CODE (t) == STATEMENT_LIST)
14954 if (TREE_CODE (t) == MODIFY_EXPR
14955 && TREE_OPERAND (t, 0) == temp)
14956 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14957 strict_overflow_p);
14964 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14965 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14967 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14968 get_callee_fndecl (t),
14971 strict_overflow_p);
14973 case COMPOUND_EXPR:
14975 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14976 strict_overflow_p);
14978 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14979 strict_overflow_p);
14981 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14982 strict_overflow_p);
14985 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14989 /* We don't know sign of `t', so be conservative and return false. */
14993 /* Return true if T is known to be non-negative. If the return
14994 value is based on the assumption that signed overflow is undefined,
14995 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14996 *STRICT_OVERFLOW_P. */
14999 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15001 enum tree_code code;
15002 if (t == error_mark_node)
15005 code = TREE_CODE (t);
15006 switch (TREE_CODE_CLASS (code))
15009 case tcc_comparison:
15010 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15012 TREE_OPERAND (t, 0),
15013 TREE_OPERAND (t, 1),
15014 strict_overflow_p);
15017 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15019 TREE_OPERAND (t, 0),
15020 strict_overflow_p);
15023 case tcc_declaration:
15024 case tcc_reference:
15025 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15033 case TRUTH_AND_EXPR:
15034 case TRUTH_OR_EXPR:
15035 case TRUTH_XOR_EXPR:
15036 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15038 TREE_OPERAND (t, 0),
15039 TREE_OPERAND (t, 1),
15040 strict_overflow_p);
15041 case TRUTH_NOT_EXPR:
15042 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15044 TREE_OPERAND (t, 0),
15045 strict_overflow_p);
15052 case WITH_SIZE_EXPR:
15054 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15057 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15061 /* Return true if `t' is known to be non-negative. Handle warnings
15062 about undefined signed overflow. */
15065 tree_expr_nonnegative_p (tree t)
15067 bool ret, strict_overflow_p;
15069 strict_overflow_p = false;
15070 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15071 if (strict_overflow_p)
15072 fold_overflow_warning (("assuming signed overflow does not occur when "
15073 "determining that expression is always "
15075 WARN_STRICT_OVERFLOW_MISC);
15080 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15081 For floating point we further ensure that T is not denormal.
15082 Similar logic is present in nonzero_address in rtlanal.h.
15084 If the return value is based on the assumption that signed overflow
15085 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15086 change *STRICT_OVERFLOW_P. */
15089 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15090 bool *strict_overflow_p)
15095 return tree_expr_nonzero_warnv_p (op0,
15096 strict_overflow_p);
15100 tree inner_type = TREE_TYPE (op0);
15101 tree outer_type = type;
15103 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15104 && tree_expr_nonzero_warnv_p (op0,
15105 strict_overflow_p));
15109 case NON_LVALUE_EXPR:
15110 return tree_expr_nonzero_warnv_p (op0,
15111 strict_overflow_p);
15120 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15121 For floating point we further ensure that T is not denormal.
15122 Similar logic is present in nonzero_address in rtlanal.h.
15124 If the return value is based on the assumption that signed overflow
15125 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15126 change *STRICT_OVERFLOW_P. */
15129 tree_binary_nonzero_warnv_p (enum tree_code code,
15132 tree op1, bool *strict_overflow_p)
15134 bool sub_strict_overflow_p;
15137 case POINTER_PLUS_EXPR:
15139 if (TYPE_OVERFLOW_UNDEFINED (type))
15141 /* With the presence of negative values it is hard
15142 to say something. */
15143 sub_strict_overflow_p = false;
15144 if (!tree_expr_nonnegative_warnv_p (op0,
15145 &sub_strict_overflow_p)
15146 || !tree_expr_nonnegative_warnv_p (op1,
15147 &sub_strict_overflow_p))
15149 /* One of operands must be positive and the other non-negative. */
15150 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15151 overflows, on a twos-complement machine the sum of two
15152 nonnegative numbers can never be zero. */
15153 return (tree_expr_nonzero_warnv_p (op0,
15155 || tree_expr_nonzero_warnv_p (op1,
15156 strict_overflow_p));
15161 if (TYPE_OVERFLOW_UNDEFINED (type))
15163 if (tree_expr_nonzero_warnv_p (op0,
15165 && tree_expr_nonzero_warnv_p (op1,
15166 strict_overflow_p))
15168 *strict_overflow_p = true;
15175 sub_strict_overflow_p = false;
15176 if (tree_expr_nonzero_warnv_p (op0,
15177 &sub_strict_overflow_p)
15178 && tree_expr_nonzero_warnv_p (op1,
15179 &sub_strict_overflow_p))
15181 if (sub_strict_overflow_p)
15182 *strict_overflow_p = true;
15187 sub_strict_overflow_p = false;
15188 if (tree_expr_nonzero_warnv_p (op0,
15189 &sub_strict_overflow_p))
15191 if (sub_strict_overflow_p)
15192 *strict_overflow_p = true;
15194 /* When both operands are nonzero, then MAX must be too. */
15195 if (tree_expr_nonzero_warnv_p (op1,
15196 strict_overflow_p))
15199 /* MAX where operand 0 is positive is positive. */
15200 return tree_expr_nonnegative_warnv_p (op0,
15201 strict_overflow_p);
15203 /* MAX where operand 1 is positive is positive. */
15204 else if (tree_expr_nonzero_warnv_p (op1,
15205 &sub_strict_overflow_p)
15206 && tree_expr_nonnegative_warnv_p (op1,
15207 &sub_strict_overflow_p))
15209 if (sub_strict_overflow_p)
15210 *strict_overflow_p = true;
15216 return (tree_expr_nonzero_warnv_p (op1,
15218 || tree_expr_nonzero_warnv_p (op0,
15219 strict_overflow_p));
15228 /* Return true when T is an address and is known to be nonzero.
15229 For floating point we further ensure that T is not denormal.
15230 Similar logic is present in nonzero_address in rtlanal.h.
15232 If the return value is based on the assumption that signed overflow
15233 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15234 change *STRICT_OVERFLOW_P. */
15237 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15239 bool sub_strict_overflow_p;
15240 switch (TREE_CODE (t))
15243 return !integer_zerop (t);
15247 tree base = TREE_OPERAND (t, 0);
15248 if (!DECL_P (base))
15249 base = get_base_address (base);
15254 /* Weak declarations may link to NULL. Other things may also be NULL
15255 so protect with -fdelete-null-pointer-checks; but not variables
15256 allocated on the stack. */
15258 && (flag_delete_null_pointer_checks
15259 || (DECL_CONTEXT (base)
15260 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15261 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15262 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15264 /* Constants are never weak. */
15265 if (CONSTANT_CLASS_P (base))
15272 sub_strict_overflow_p = false;
15273 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15274 &sub_strict_overflow_p)
15275 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15276 &sub_strict_overflow_p))
15278 if (sub_strict_overflow_p)
15279 *strict_overflow_p = true;
15290 /* Return true when T is an address and is known to be nonzero.
15291 For floating point we further ensure that T is not denormal.
15292 Similar logic is present in nonzero_address in rtlanal.h.
15294 If the return value is based on the assumption that signed overflow
15295 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15296 change *STRICT_OVERFLOW_P. */
15299 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15301 tree type = TREE_TYPE (t);
15302 enum tree_code code;
15304 /* Doing something useful for floating point would need more work. */
15305 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15308 code = TREE_CODE (t);
15309 switch (TREE_CODE_CLASS (code))
15312 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15313 strict_overflow_p);
15315 case tcc_comparison:
15316 return tree_binary_nonzero_warnv_p (code, type,
15317 TREE_OPERAND (t, 0),
15318 TREE_OPERAND (t, 1),
15319 strict_overflow_p);
15321 case tcc_declaration:
15322 case tcc_reference:
15323 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15331 case TRUTH_NOT_EXPR:
15332 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15333 strict_overflow_p);
15335 case TRUTH_AND_EXPR:
15336 case TRUTH_OR_EXPR:
15337 case TRUTH_XOR_EXPR:
15338 return tree_binary_nonzero_warnv_p (code, type,
15339 TREE_OPERAND (t, 0),
15340 TREE_OPERAND (t, 1),
15341 strict_overflow_p);
15348 case WITH_SIZE_EXPR:
15350 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15352 case COMPOUND_EXPR:
15355 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15356 strict_overflow_p);
15359 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15360 strict_overflow_p);
15363 return alloca_call_p (t);
15371 /* Return true when T is an address and is known to be nonzero.
15372 Handle warnings about undefined signed overflow. */
15375 tree_expr_nonzero_p (tree t)
15377 bool ret, strict_overflow_p;
15379 strict_overflow_p = false;
15380 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15381 if (strict_overflow_p)
15382 fold_overflow_warning (("assuming signed overflow does not occur when "
15383 "determining that expression is always "
15385 WARN_STRICT_OVERFLOW_MISC);
15389 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15390 attempt to fold the expression to a constant without modifying TYPE,
15393 If the expression could be simplified to a constant, then return
15394 the constant. If the expression would not be simplified to a
15395 constant, then return NULL_TREE. */
15398 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15400 tree tem = fold_binary (code, type, op0, op1);
15401 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15404 /* Given the components of a unary expression CODE, TYPE and OP0,
15405 attempt to fold the expression to a constant without modifying
15408 If the expression could be simplified to a constant, then return
15409 the constant. If the expression would not be simplified to a
15410 constant, then return NULL_TREE. */
15413 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15415 tree tem = fold_unary (code, type, op0);
15416 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15419 /* If EXP represents referencing an element in a constant string
15420 (either via pointer arithmetic or array indexing), return the
15421 tree representing the value accessed, otherwise return NULL. */
15424 fold_read_from_constant_string (tree exp)
15426 if ((TREE_CODE (exp) == INDIRECT_REF
15427 || TREE_CODE (exp) == ARRAY_REF)
15428 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15430 tree exp1 = TREE_OPERAND (exp, 0);
15433 location_t loc = EXPR_LOCATION (exp);
15435 if (TREE_CODE (exp) == INDIRECT_REF)
15436 string = string_constant (exp1, &index);
15439 tree low_bound = array_ref_low_bound (exp);
15440 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15442 /* Optimize the special-case of a zero lower bound.
15444 We convert the low_bound to sizetype to avoid some problems
15445 with constant folding. (E.g. suppose the lower bound is 1,
15446 and its mode is QI. Without the conversion,l (ARRAY
15447 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15448 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15449 if (! integer_zerop (low_bound))
15450 index = size_diffop_loc (loc, index,
15451 fold_convert_loc (loc, sizetype, low_bound));
15457 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15458 && TREE_CODE (string) == STRING_CST
15459 && TREE_CODE (index) == INTEGER_CST
15460 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15461 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15463 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15464 return build_int_cst_type (TREE_TYPE (exp),
15465 (TREE_STRING_POINTER (string)
15466 [TREE_INT_CST_LOW (index)]));
15471 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15472 an integer constant, real, or fixed-point constant.
15474 TYPE is the type of the result. */
15477 fold_negate_const (tree arg0, tree type)
15479 tree t = NULL_TREE;
15481 switch (TREE_CODE (arg0))
15485 double_int val = tree_to_double_int (arg0);
15486 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15488 t = force_fit_type_double (type, val, 1,
15489 (overflow | TREE_OVERFLOW (arg0))
15490 && !TYPE_UNSIGNED (type));
15495 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15500 FIXED_VALUE_TYPE f;
15501 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15502 &(TREE_FIXED_CST (arg0)), NULL,
15503 TYPE_SATURATING (type));
15504 t = build_fixed (type, f);
15505 /* Propagate overflow flags. */
15506 if (overflow_p | TREE_OVERFLOW (arg0))
15507 TREE_OVERFLOW (t) = 1;
15512 gcc_unreachable ();
15518 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15519 an integer constant or real constant.
15521 TYPE is the type of the result. */
15524 fold_abs_const (tree arg0, tree type)
15526 tree t = NULL_TREE;
15528 switch (TREE_CODE (arg0))
15532 double_int val = tree_to_double_int (arg0);
15534 /* If the value is unsigned or non-negative, then the absolute value
15535 is the same as the ordinary value. */
15536 if (TYPE_UNSIGNED (type)
15537 || !double_int_negative_p (val))
15540 /* If the value is negative, then the absolute value is
15546 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15547 t = force_fit_type_double (type, val, -1,
15548 overflow | TREE_OVERFLOW (arg0));
15554 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15555 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15561 gcc_unreachable ();
15567 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15568 constant. TYPE is the type of the result. */
15571 fold_not_const (const_tree arg0, tree type)
15575 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15577 val = double_int_not (tree_to_double_int (arg0));
15578 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15581 /* Given CODE, a relational operator, the target type, TYPE and two
15582 constant operands OP0 and OP1, return the result of the
15583 relational operation. If the result is not a compile time
15584 constant, then return NULL_TREE. */
15587 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15589 int result, invert;
15591 /* From here on, the only cases we handle are when the result is
15592 known to be a constant. */
15594 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15596 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15597 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15599 /* Handle the cases where either operand is a NaN. */
15600 if (real_isnan (c0) || real_isnan (c1))
15610 case UNORDERED_EXPR:
15624 if (flag_trapping_math)
15630 gcc_unreachable ();
15633 return constant_boolean_node (result, type);
15636 return constant_boolean_node (real_compare (code, c0, c1), type);
15639 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15641 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15642 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15643 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15646 /* Handle equality/inequality of complex constants. */
15647 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15649 tree rcond = fold_relational_const (code, type,
15650 TREE_REALPART (op0),
15651 TREE_REALPART (op1));
15652 tree icond = fold_relational_const (code, type,
15653 TREE_IMAGPART (op0),
15654 TREE_IMAGPART (op1));
15655 if (code == EQ_EXPR)
15656 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15657 else if (code == NE_EXPR)
15658 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15663 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15665 To compute GT, swap the arguments and do LT.
15666 To compute GE, do LT and invert the result.
15667 To compute LE, swap the arguments, do LT and invert the result.
15668 To compute NE, do EQ and invert the result.
15670 Therefore, the code below must handle only EQ and LT. */
15672 if (code == LE_EXPR || code == GT_EXPR)
15677 code = swap_tree_comparison (code);
15680 /* Note that it is safe to invert for real values here because we
15681 have already handled the one case that it matters. */
15684 if (code == NE_EXPR || code == GE_EXPR)
15687 code = invert_tree_comparison (code, false);
15690 /* Compute a result for LT or EQ if args permit;
15691 Otherwise return T. */
15692 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15694 if (code == EQ_EXPR)
15695 result = tree_int_cst_equal (op0, op1);
15696 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15697 result = INT_CST_LT_UNSIGNED (op0, op1);
15699 result = INT_CST_LT (op0, op1);
15706 return constant_boolean_node (result, type);
15709 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15710 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15714 fold_build_cleanup_point_expr (tree type, tree expr)
15716 /* If the expression does not have side effects then we don't have to wrap
15717 it with a cleanup point expression. */
15718 if (!TREE_SIDE_EFFECTS (expr))
15721 /* If the expression is a return, check to see if the expression inside the
15722 return has no side effects or the right hand side of the modify expression
15723 inside the return. If either don't have side effects set we don't need to
15724 wrap the expression in a cleanup point expression. Note we don't check the
15725 left hand side of the modify because it should always be a return decl. */
15726 if (TREE_CODE (expr) == RETURN_EXPR)
15728 tree op = TREE_OPERAND (expr, 0);
15729 if (!op || !TREE_SIDE_EFFECTS (op))
15731 op = TREE_OPERAND (op, 1);
15732 if (!TREE_SIDE_EFFECTS (op))
15736 return build1 (CLEANUP_POINT_EXPR, type, expr);
15739 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15740 of an indirection through OP0, or NULL_TREE if no simplification is
15744 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15750 subtype = TREE_TYPE (sub);
15751 if (!POINTER_TYPE_P (subtype))
15754 if (TREE_CODE (sub) == ADDR_EXPR)
15756 tree op = TREE_OPERAND (sub, 0);
15757 tree optype = TREE_TYPE (op);
15758 /* *&CONST_DECL -> to the value of the const decl. */
15759 if (TREE_CODE (op) == CONST_DECL)
15760 return DECL_INITIAL (op);
15761 /* *&p => p; make sure to handle *&"str"[cst] here. */
15762 if (type == optype)
15764 tree fop = fold_read_from_constant_string (op);
15770 /* *(foo *)&fooarray => fooarray[0] */
15771 else if (TREE_CODE (optype) == ARRAY_TYPE
15772 && type == TREE_TYPE (optype)
15773 && (!in_gimple_form
15774 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15776 tree type_domain = TYPE_DOMAIN (optype);
15777 tree min_val = size_zero_node;
15778 if (type_domain && TYPE_MIN_VALUE (type_domain))
15779 min_val = TYPE_MIN_VALUE (type_domain);
15781 && TREE_CODE (min_val) != INTEGER_CST)
15783 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15784 NULL_TREE, NULL_TREE);
15786 /* *(foo *)&complexfoo => __real__ complexfoo */
15787 else if (TREE_CODE (optype) == COMPLEX_TYPE
15788 && type == TREE_TYPE (optype))
15789 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15790 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15791 else if (TREE_CODE (optype) == VECTOR_TYPE
15792 && type == TREE_TYPE (optype))
15794 tree part_width = TYPE_SIZE (type);
15795 tree index = bitsize_int (0);
15796 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15800 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15801 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15803 tree op00 = TREE_OPERAND (sub, 0);
15804 tree op01 = TREE_OPERAND (sub, 1);
15807 if (TREE_CODE (op00) == ADDR_EXPR)
15810 op00 = TREE_OPERAND (op00, 0);
15811 op00type = TREE_TYPE (op00);
15813 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15814 if (TREE_CODE (op00type) == VECTOR_TYPE
15815 && type == TREE_TYPE (op00type))
15817 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15818 tree part_width = TYPE_SIZE (type);
15819 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15820 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15821 tree index = bitsize_int (indexi);
15823 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
15824 return fold_build3_loc (loc,
15825 BIT_FIELD_REF, type, op00,
15826 part_width, index);
15829 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15830 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15831 && type == TREE_TYPE (op00type))
15833 tree size = TYPE_SIZE_UNIT (type);
15834 if (tree_int_cst_equal (size, op01))
15835 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15837 /* ((foo *)&fooarray)[1] => fooarray[1] */
15838 else if (TREE_CODE (op00type) == ARRAY_TYPE
15839 && type == TREE_TYPE (op00type))
15841 tree type_domain = TYPE_DOMAIN (op00type);
15842 tree min_val = size_zero_node;
15843 if (type_domain && TYPE_MIN_VALUE (type_domain))
15844 min_val = TYPE_MIN_VALUE (type_domain);
15845 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15846 TYPE_SIZE_UNIT (type));
15847 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15848 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15849 NULL_TREE, NULL_TREE);
15854 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15855 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15856 && type == TREE_TYPE (TREE_TYPE (subtype))
15857 && (!in_gimple_form
15858 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15861 tree min_val = size_zero_node;
15862 sub = build_fold_indirect_ref_loc (loc, sub);
15863 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15864 if (type_domain && TYPE_MIN_VALUE (type_domain))
15865 min_val = TYPE_MIN_VALUE (type_domain);
15867 && TREE_CODE (min_val) != INTEGER_CST)
15869 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15876 /* Builds an expression for an indirection through T, simplifying some
15880 build_fold_indirect_ref_loc (location_t loc, tree t)
15882 tree type = TREE_TYPE (TREE_TYPE (t));
15883 tree sub = fold_indirect_ref_1 (loc, type, t);
15888 return build1_loc (loc, INDIRECT_REF, type, t);
15891 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15894 fold_indirect_ref_loc (location_t loc, tree t)
15896 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15904 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15905 whose result is ignored. The type of the returned tree need not be
15906 the same as the original expression. */
15909 fold_ignored_result (tree t)
15911 if (!TREE_SIDE_EFFECTS (t))
15912 return integer_zero_node;
15915 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15918 t = TREE_OPERAND (t, 0);
15922 case tcc_comparison:
15923 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15924 t = TREE_OPERAND (t, 0);
15925 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15926 t = TREE_OPERAND (t, 1);
15931 case tcc_expression:
15932 switch (TREE_CODE (t))
15934 case COMPOUND_EXPR:
15935 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15937 t = TREE_OPERAND (t, 0);
15941 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15942 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15944 t = TREE_OPERAND (t, 0);
15957 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15958 This can only be applied to objects of a sizetype. */
15961 round_up_loc (location_t loc, tree value, int divisor)
15963 tree div = NULL_TREE;
15965 gcc_assert (divisor > 0);
15969 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15970 have to do anything. Only do this when we are not given a const,
15971 because in that case, this check is more expensive than just
15973 if (TREE_CODE (value) != INTEGER_CST)
15975 div = build_int_cst (TREE_TYPE (value), divisor);
15977 if (multiple_of_p (TREE_TYPE (value), value, div))
15981 /* If divisor is a power of two, simplify this to bit manipulation. */
15982 if (divisor == (divisor & -divisor))
15984 if (TREE_CODE (value) == INTEGER_CST)
15986 double_int val = tree_to_double_int (value);
15989 if ((val.low & (divisor - 1)) == 0)
15992 overflow_p = TREE_OVERFLOW (value);
15993 val.low &= ~(divisor - 1);
15994 val.low += divisor;
16002 return force_fit_type_double (TREE_TYPE (value), val,
16009 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16010 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16011 t = build_int_cst (TREE_TYPE (value), -divisor);
16012 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16018 div = build_int_cst (TREE_TYPE (value), divisor);
16019 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16020 value = size_binop_loc (loc, MULT_EXPR, value, div);
16026 /* Likewise, but round down. */
16029 round_down_loc (location_t loc, tree value, int divisor)
16031 tree div = NULL_TREE;
16033 gcc_assert (divisor > 0);
16037 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16038 have to do anything. Only do this when we are not given a const,
16039 because in that case, this check is more expensive than just
16041 if (TREE_CODE (value) != INTEGER_CST)
16043 div = build_int_cst (TREE_TYPE (value), divisor);
16045 if (multiple_of_p (TREE_TYPE (value), value, div))
16049 /* If divisor is a power of two, simplify this to bit manipulation. */
16050 if (divisor == (divisor & -divisor))
16054 t = build_int_cst (TREE_TYPE (value), -divisor);
16055 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16060 div = build_int_cst (TREE_TYPE (value), divisor);
16061 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16062 value = size_binop_loc (loc, MULT_EXPR, value, div);
16068 /* Returns the pointer to the base of the object addressed by EXP and
16069 extracts the information about the offset of the access, storing it
16070 to PBITPOS and POFFSET. */
16073 split_address_to_core_and_offset (tree exp,
16074 HOST_WIDE_INT *pbitpos, tree *poffset)
16077 enum machine_mode mode;
16078 int unsignedp, volatilep;
16079 HOST_WIDE_INT bitsize;
16080 location_t loc = EXPR_LOCATION (exp);
16082 if (TREE_CODE (exp) == ADDR_EXPR)
16084 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16085 poffset, &mode, &unsignedp, &volatilep,
16087 core = build_fold_addr_expr_loc (loc, core);
16093 *poffset = NULL_TREE;
16099 /* Returns true if addresses of E1 and E2 differ by a constant, false
16100 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16103 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16106 HOST_WIDE_INT bitpos1, bitpos2;
16107 tree toffset1, toffset2, tdiff, type;
16109 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16110 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16112 if (bitpos1 % BITS_PER_UNIT != 0
16113 || bitpos2 % BITS_PER_UNIT != 0
16114 || !operand_equal_p (core1, core2, 0))
16117 if (toffset1 && toffset2)
16119 type = TREE_TYPE (toffset1);
16120 if (type != TREE_TYPE (toffset2))
16121 toffset2 = fold_convert (type, toffset2);
16123 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16124 if (!cst_and_fits_in_hwi (tdiff))
16127 *diff = int_cst_value (tdiff);
16129 else if (toffset1 || toffset2)
16131 /* If only one of the offsets is non-constant, the difference cannot
16138 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16142 /* Simplify the floating point expression EXP when the sign of the
16143 result is not significant. Return NULL_TREE if no simplification
16147 fold_strip_sign_ops (tree exp)
16150 location_t loc = EXPR_LOCATION (exp);
16152 switch (TREE_CODE (exp))
16156 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16157 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16161 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16163 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16164 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16165 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16166 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16167 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16168 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16171 case COMPOUND_EXPR:
16172 arg0 = TREE_OPERAND (exp, 0);
16173 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16175 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16179 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16180 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16182 return fold_build3_loc (loc,
16183 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16184 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16185 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16190 const enum built_in_function fcode = builtin_mathfn_code (exp);
16193 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16194 /* Strip copysign function call, return the 1st argument. */
16195 arg0 = CALL_EXPR_ARG (exp, 0);
16196 arg1 = CALL_EXPR_ARG (exp, 1);
16197 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16200 /* Strip sign ops from the argument of "odd" math functions. */
16201 if (negate_mathfn_p (fcode))
16203 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16205 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);