/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "gimple.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
			  tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc != UNKNOWN_LOCATION ? tloc : loc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
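
/* Worked example (a sketch with 32-bit HOST_WIDE_INTs): for
   A = 0x7fffffff and B = 1 the two's complement sum wraps to 0x80000000.
   A and B agree in sign, so ~(A ^ B) has the sign bit set, and A and SUM
   differ in sign, so A ^ SUM has it set too; their AND is negative and
   the macro reports overflow.  For A = 0x7fffffff, B = -1 the operand
   signs differ, ~(A ^ B) has a clear sign bit, and no overflow is
   reported.  */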
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is taken from operand two; that
     does the correct thing for POINTER_PLUS_EXPR, where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
			   tree_to_double_int (arg2),
			   uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
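
/* A usage sketch (illustrative only, kept under "#if 0"; the constants
   here are hypothetical and would normally come from the optimizers):  */
#if 0
  {
    tree twelve = build_int_cst (sizetype, 12);
    tree four = build_int_cst (sizetype, 4);
    /* Exact division: yields the INTEGER_CST 3.  */
    tree q = div_if_zero_remainder (TRUNC_DIV_EXPR, twelve, four);
    /* 13 / 4 leaves a remainder, so this yields NULL_TREE.  */
    tree none = div_if_zero_remainder (TRUNC_DIV_EXPR,
				       build_int_cst (sizetype, 13), four);
  }
#endif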
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
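
/* Sketch of the deferral protocol (hypothetical caller; EXPR and STMT
   stand for whatever the caller is analyzing):  */
#if 0
  {
    fold_defer_overflow_warnings ();
    tree res = fold (expr);
    bool used = res != NULL_TREE && TREE_CODE (res) == INTEGER_CST;
    /* The deferred warning is emitted only if the folded result was
       actually used.  */
    fold_undefer_overflow_warnings (used, stmt, 0);
  }
#endif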
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
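
/* Worked example: in a 32-bit signed type the only constant whose
   negation overflows is the most negative value 0x80000000; the test
   above compares the (masked) significant bits against the lone sign
   bit pattern 1 << (prec - 1) and returns false exactly in that
   case.  */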
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR: case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR: case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      negate_expr (tem), TREE_OPERAND (t, 1));
	    }
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
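
/* Worked example for the RSHIFT_EXPR case: with a 32-bit int,
   (int) x >> 31 evaluates to 0 or -1 depending on the sign bit, so its
   negation is 0 or 1, which is exactly (unsigned) x >> 31; redoing the
   shift in the opposite signedness absorbs the negation for free.  */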
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
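
/* Illustration (hypothetical trees): splitting IN = x + 5 with
   CODE == PLUS_EXPR stores the INTEGER_CST 5 in *LITP, leaves *CONP
   null, and returns x as the variable part; splitting x - 5 stores
   the 5 in *MINUS_LITP instead, recording that it was subtracted.  */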
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
			       TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
			       TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
				TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
				TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
			     &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
		  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
			     &res.low, &res.high);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    op1.low += op2.low - 1;

	  res.low = op1.low / op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
	return NULL_TREE;
      if (double_int_one_p (op2))
	{
	  res = op1;
	  break;
	}
      if (double_int_equal_p (op1, op2)
	  && ! double_int_zero_p (op1))
	{
	  res = double_int_one;
	  break;
	}
      overflow = div_and_round_double (code, uns,
				       op1.low, op1.high, op2.low, op2.high,
				       &res.low, &res.high,
				       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    op1.low += op2.low - 1;
	  res.low = op1.low % op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       op1.low, op1.high, op2.low, op2.high,
				       &tmp.low, &tmp.high,
				       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
			     ((!uns || is_sizetype) && overflow)
			     | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
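
/* A minimal usage sketch (illustrative only, kept under "#if 0"; the
   constants are hypothetical):  */
#if 0
  {
    tree two = build_int_cst (integer_type_node, 2);
    tree three = build_int_cst (integer_type_node, 3);
    /* Folds to the INTEGER_CST 5.  */
    tree five = int_const_binop (PLUS_EXPR, two, three);
    /* Division by a zero constant is rejected: returns NULL_TREE.  */
    tree bad = int_const_binop (TRUNC_DIV_EXPR, two,
				build_int_cst (integer_type_node, 0));
  }
#endif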
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fall through.  */

	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }
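
  /* Worked numeric example for the straightforward algorithm above:
     (1 + 2i) / (3 + 4i) with t = 3*3 + 4*4 = 25 gives
     ((1*3 + 2*4)/25) + i((2*3 - 1*4)/25) = 11/25 + (2/25)i.  */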
  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
	return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
	{
	  tree elem1, elem2, elem;

	  /* The trailing elements can be empty and should be treated
	     as 0.  */
	  if (!elements1)
	    elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					integer_zero_node);
	  else
	    {
	      elem1 = TREE_VALUE (elements1);
	      elements1 = TREE_CHAIN (elements1);
	    }

	  if (!elements2)
	    elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					integer_zero_node);
	  else
	    {
	      elem2 = TREE_VALUE (elements2);
	      elements2 = TREE_CHAIN (elements2);
	    }

	  elem = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elem == NULL_TREE)
	    return NULL_TREE;

	  list = tree_cons (NULL_TREE, elem, list);
	}
      return build_vector (type, nreverse (list));
    }

  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
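
/* Worked example: with sizetype constants ARG0 = 2 and ARG1 = 5,
   ARG0 < ARG1, so the function computes 5 - 2 = 3 in the unsigned type
   (which cannot overflow), converts it to ssizetype and subtracts it
   from zero, yielding -3 without relying on unsigned wraparound.  */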
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
			     !POINTER_TYPE_P (TREE_TYPE (arg1)),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = true;
	  val = tree_to_double_int (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = true;
	      val = tree_to_double_int (ut);
	    }
	}
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
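
/* Worked example (hypothetical 32-bit int target): converting the
   REAL_CST 1.0e30 saturates to INT_MAX (2147483647) and the result is
   marked TREE_OVERFLOW; converting a NaN yields 0, likewise flagged.  */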
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
				      HOST_BITS_PER_DOUBLE_INT,
				      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     If the fractional bits are not zero, add 1 to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
			     (double_int_negative_p (temp)
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
				       TREE_INT_CST (arg1),
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold_build1_loc (loc, NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* Fall through.  */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
 fold_convert_exit:
  return protected_set_expr_location_unshare (tem, loc);
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case MEM_REF:
    case INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
	break;
      return false;
    }

  return true;
}
2078 non_lvalue_loc (location_t loc, tree x)
2080 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2085 if (! maybe_lvalue_p (x))
2087 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
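
/* The encoding packs one bit per outcome: bit 0 for "less than",
   bit 1 for "equal", bit 2 for "greater than" and bit 3 for
   "unordered".  For example COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ,
   so ANDing or ORing two codes combines the corresponding
   comparisons directly.  */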
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
2363 /* Return nonzero if two operands (typically of the same tree node)
2364 are necessarily equal. If either argument has side-effects this
2365 function returns zero. FLAGS modifies behavior as follows:
2367 If OEP_ONLY_CONST is set, only return nonzero for constants.
2368 This function tests whether the operands are indistinguishable;
2369 it does not test whether they are equal using C's == operation.
2370 The distinction is important for IEEE floating point, because
2371 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2372 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2374 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2375 even though it may hold multiple values during a function.
2376 This is because a GCC tree node guarantees that nothing else is
2377 executed between the evaluation of its "operands" (which may often
2378 be evaluated in arbitrary order). Hence if the operands themselves
2379 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2380 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2381 unset means assuming isochronic (or instantaneous) tree equivalence.
2382 Unless comparing arbitrary expression trees, such as from different
2383 statements, this flag can usually be left unset.
2385 If OEP_PURE_SAME is set, then pure functions with identical arguments
2386 are considered the same. It is used when the caller has other ways
2387 to ensure that global memory is unchanged in between. */
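/* An illustrative sketch: for side-effect-free decls A and B, two
   distinct A + B trees compare equal, whereas two identical-looking
   f () + 1 trees compare unequal unless the callee is ECF_CONST (or
   ECF_PURE with OEP_PURE_SAME), since each call may yield a different
   value.  */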
2390 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2392 /* If either is ERROR_MARK, they aren't equal. */
2393 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2394 || TREE_TYPE (arg0) == error_mark_node
2395 || TREE_TYPE (arg1) == error_mark_node)
2398 /* Similar, if either does not have a type (like a released SSA name),
2399 they aren't equal. */
2400 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2403 /* Check equality of integer constants before bailing out due to
2404 precision differences. */
2405 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2406 return tree_int_cst_equal (arg0, arg1);
2408 /* If both types don't have the same signedness, then we can't consider
2409 them equal. We must check this before the STRIP_NOPS calls
2410 because they may change the signedness of the arguments. As pointers
2411 strictly don't have a signedness, require either two pointers or
2412 two non-pointers as well. */
2413 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2414 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2417 /* We cannot consider pointers to different address spaces equal. */
2418 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2419 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2420 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2423 /* If both types don't have the same precision, then it is not safe to interchange them. */
2425 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2431 /* In case both args are comparisons but with different comparison
2432 code, try to swap the comparison operands of one arg to produce
2433 a match and compare that variant. */
2434 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2435 && COMPARISON_CLASS_P (arg0)
2436 && COMPARISON_CLASS_P (arg1))
2438 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2440 if (TREE_CODE (arg0) == swap_code)
2441 return operand_equal_p (TREE_OPERAND (arg0, 0),
2442 TREE_OPERAND (arg1, 1), flags)
2443 && operand_equal_p (TREE_OPERAND (arg0, 1),
2444 TREE_OPERAND (arg1, 0), flags);
2447 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2448 /* This is needed for conversions and for COMPONENT_REF.
2449 Might as well play it safe and always test this. */
2450 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2451 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2452 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2455 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2456 We don't care about side effects in that case because the SAVE_EXPR
2457 takes care of that for us. In all other cases, two expressions are
2458 equal if they have no side effects. If we have two identical
2459 expressions with side effects that should be treated the same due
2460 to the only side effects being identical SAVE_EXPR's, that will
2461 be detected in the recursive calls below.
2462 If we are taking an invariant address of two identical objects
2463 they are necessarily equal as well. */
2464 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2465 && (TREE_CODE (arg0) == SAVE_EXPR
2466 || (flags & OEP_CONSTANT_ADDRESS_OF)
2467 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2470 /* Next handle constant cases, those for which we can return 1 even
2471 if ONLY_CONST is set. */
2472 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2473 switch (TREE_CODE (arg0))
2476 return tree_int_cst_equal (arg0, arg1);
2479 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2480 TREE_FIXED_CST (arg1));
2483 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2484 TREE_REAL_CST (arg1)))
2488 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2490 /* If we do not distinguish between signed and unsigned zero,
2491 consider them equal. */
2492 if (real_zerop (arg0) && real_zerop (arg1))
2501 v1 = TREE_VECTOR_CST_ELTS (arg0);
2502 v2 = TREE_VECTOR_CST_ELTS (arg1);
2505 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2508 v1 = TREE_CHAIN (v1);
2509 v2 = TREE_CHAIN (v2);
2516 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2518 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2522 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2523 && ! memcmp (TREE_STRING_POINTER (arg0),
2524 TREE_STRING_POINTER (arg1),
2525 TREE_STRING_LENGTH (arg0)));
2528 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2529 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2530 ? OEP_CONSTANT_ADDRESS_OF : 0);
2535 if (flags & OEP_ONLY_CONST)
2538 /* Define macros to test an operand from arg0 and arg1 for equality and a
2539 variant that allows null and views null as being different from any
2540 non-null value. In the latter case, if either is null, then both
2541 must be; otherwise, do the normal comparison. */
2542 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2543 TREE_OPERAND (arg1, N), flags)
2545 #define OP_SAME_WITH_NULL(N) \
2546 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2547 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2549 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2552 /* Two conversions are equal only if signedness and modes match. */
2553 switch (TREE_CODE (arg0))
2556 case FIX_TRUNC_EXPR:
2557 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2558 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2568 case tcc_comparison:
2570 if (OP_SAME (0) && OP_SAME (1))
2573 /* For commutative ops, allow the other order. */
2574 return (commutative_tree_code (TREE_CODE (arg0))
2575 && operand_equal_p (TREE_OPERAND (arg0, 0),
2576 TREE_OPERAND (arg1, 1), flags)
2577 && operand_equal_p (TREE_OPERAND (arg0, 1),
2578 TREE_OPERAND (arg1, 0), flags));
2581 /* If either of the pointer (or reference) expressions we are
2582 dereferencing contain a side effect, these cannot be equal. */
2583 if (TREE_SIDE_EFFECTS (arg0)
2584 || TREE_SIDE_EFFECTS (arg1))
2587 switch (TREE_CODE (arg0))
2595 /* Require equal access sizes, and similar pointer types.
2596 We can have incomplete types for array references of
2597 variable-sized arrays from the Fortran frontend though. Also verify the types are compatible. */
2599 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2600 || (TYPE_SIZE (TREE_TYPE (arg0))
2601 && TYPE_SIZE (TREE_TYPE (arg1))
2602 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2603 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2604 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2605 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2606 && OP_SAME (0) && OP_SAME (1));
2609 case ARRAY_RANGE_REF:
2610 /* Operands 2 and 3 may be null.
2611 Compare the array index by value if it is constant first as we
2612 may have different types but same value here. */
2614 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2615 TREE_OPERAND (arg1, 1))
2617 && OP_SAME_WITH_NULL (2)
2618 && OP_SAME_WITH_NULL (3));
2621 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2622 may be NULL when we're called to compare MEM_EXPRs. */
2623 return OP_SAME_WITH_NULL (0)
2625 && OP_SAME_WITH_NULL (2);
2628 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2634 case tcc_expression:
2635 switch (TREE_CODE (arg0))
2638 case TRUTH_NOT_EXPR:
2641 case TRUTH_ANDIF_EXPR:
2642 case TRUTH_ORIF_EXPR:
2643 return OP_SAME (0) && OP_SAME (1);
2646 case WIDEN_MULT_PLUS_EXPR:
2647 case WIDEN_MULT_MINUS_EXPR:
2650 /* The multiplication operands are commutative. */
2653 case TRUTH_AND_EXPR:
2655 case TRUTH_XOR_EXPR:
2656 if (OP_SAME (0) && OP_SAME (1))
2659 /* Otherwise take into account this is a commutative operation. */
2660 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2661 TREE_OPERAND (arg1, 1), flags)
2662 && operand_equal_p (TREE_OPERAND (arg0, 1),
2663 TREE_OPERAND (arg1, 0), flags));
2668 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2675 switch (TREE_CODE (arg0))
2678 /* If the CALL_EXPRs call different functions, then they
2679 clearly can not be equal. */
2680 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2685 unsigned int cef = call_expr_flags (arg0);
2686 if (flags & OEP_PURE_SAME)
2687 cef &= ECF_CONST | ECF_PURE;
2694 /* Now see if all the arguments are the same. */
2696 const_call_expr_arg_iterator iter0, iter1;
2698 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2699 a1 = first_const_call_expr_arg (arg1, &iter1);
2701 a0 = next_const_call_expr_arg (&iter0),
2702 a1 = next_const_call_expr_arg (&iter1))
2703 if (! operand_equal_p (a0, a1, flags))
2706 /* If we get here and both argument lists are exhausted
2707 then the CALL_EXPRs are equal. */
2708 return ! (a0 || a1);
2714 case tcc_declaration:
2715 /* Consider __builtin_sqrt equal to sqrt. */
2716 return (TREE_CODE (arg0) == FUNCTION_DECL
2717 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2718 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2719 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2726 #undef OP_SAME_WITH_NULL
2729 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2730 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2732 When in doubt, return 0. */
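/* A hypothetical example: if ARG1 is (int) C for a signed char C, and
   OTHER is a constant small enough to fit in a signed char,
   shorten_compare may have rewritten the comparison to use C directly,
   making ARG0 be C itself; we try to reproduce that narrowing here.  */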
2735 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2737 int unsignedp1, unsignedpo;
2738 tree primarg0, primarg1, primother;
2739 unsigned int correct_width;
2741 if (operand_equal_p (arg0, arg1, 0))
2744 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2745 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2748 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2749 and see if the inner values are the same. This removes any
2750 signedness comparison, which doesn't matter here. */
2751 primarg0 = arg0, primarg1 = arg1;
2752 STRIP_NOPS (primarg0);
2753 STRIP_NOPS (primarg1);
2754 if (operand_equal_p (primarg0, primarg1, 0))
2757 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2758 actual comparison operand, ARG0.
2760 First throw away any conversions to wider types
2761 already present in the operands. */
2763 primarg1 = get_narrower (arg1, &unsignedp1);
2764 primother = get_narrower (other, &unsignedpo);
2766 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2767 if (unsignedp1 == unsignedpo
2768 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2769 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2771 tree type = TREE_TYPE (arg0);
2773 /* Make sure the shorter operand is extended the right way
2774 to match the longer operand. */
2775 primarg1 = fold_convert (signed_or_unsigned_type_for
2776 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2778 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2785 /* See if ARG is an expression that is either a comparison or is performing
2786 arithmetic on comparisons. The comparisons must only be comparing
2787 two different values, which will be stored in *CVAL1 and *CVAL2; if
2788 they are nonzero it means that some operands have already been found.
2789 No variables may be used anywhere else in the expression except in the
2790 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2791 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2793 If this is true, return 1. Otherwise, return zero. */
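/* For example, ARG = (a < b) || (a == b) succeeds, with *CVAL1 and
   *CVAL2 set to A and B; (a < b) || (a < c) fails, because three
   distinct values take part in the comparisons.  */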
2796 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2798 enum tree_code code = TREE_CODE (arg);
2799 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2801 /* We can handle some of the tcc_expression cases here. */
2802 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2804 else if (tclass == tcc_expression
2805 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2806 || code == COMPOUND_EXPR))
2807 tclass = tcc_binary;
2809 else if (tclass == tcc_expression && code == SAVE_EXPR
2810 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2812 /* If we've already found a CVAL1 or CVAL2, this expression is
2813 too complex to handle. */
2814 if (*cval1 || *cval2)
2824 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2827 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2828 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2829 cval1, cval2, save_p));
2834 case tcc_expression:
2835 if (code == COND_EXPR)
2836 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2837 cval1, cval2, save_p)
2838 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2839 cval1, cval2, save_p)
2840 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2841 cval1, cval2, save_p));
2844 case tcc_comparison:
2845 /* First see if we can handle the first operand, then the second. For
2846 the second operand, we know *CVAL1 can't be zero. It must be that
2847 one side of the comparison is each of the values; test for the
2848 case where this isn't true by failing if the two operands are the same. */
2851 if (operand_equal_p (TREE_OPERAND (arg, 0),
2852 TREE_OPERAND (arg, 1), 0))
2856 *cval1 = TREE_OPERAND (arg, 0);
2857 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2859 else if (*cval2 == 0)
2860 *cval2 = TREE_OPERAND (arg, 0);
2861 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2866 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2868 else if (*cval2 == 0)
2869 *cval2 = TREE_OPERAND (arg, 1);
2870 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2882 /* ARG is a tree that is known to contain just arithmetic operations and
2883 comparisons. Evaluate the operations in the tree substituting NEW0 for
2884 any occurrence of OLD0 as an operand of a comparison and likewise for NEW1 and OLD1. */
2888 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2889 tree old1, tree new1)
2891 tree type = TREE_TYPE (arg);
2892 enum tree_code code = TREE_CODE (arg);
2893 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2895 /* We can handle some of the tcc_expression cases here. */
2896 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2898 else if (tclass == tcc_expression
2899 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2900 tclass = tcc_binary;
2905 return fold_build1_loc (loc, code, type,
2906 eval_subst (loc, TREE_OPERAND (arg, 0),
2907 old0, new0, old1, new1));
2910 return fold_build2_loc (loc, code, type,
2911 eval_subst (loc, TREE_OPERAND (arg, 0),
2912 old0, new0, old1, new1),
2913 eval_subst (loc, TREE_OPERAND (arg, 1),
2914 old0, new0, old1, new1));
2916 case tcc_expression:
2920 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2924 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2928 return fold_build3_loc (loc, code, type,
2929 eval_subst (loc, TREE_OPERAND (arg, 0),
2930 old0, new0, old1, new1),
2931 eval_subst (loc, TREE_OPERAND (arg, 1),
2932 old0, new0, old1, new1),
2933 eval_subst (loc, TREE_OPERAND (arg, 2),
2934 old0, new0, old1, new1));
2938 /* Fall through - ??? */
2940 case tcc_comparison:
2942 tree arg0 = TREE_OPERAND (arg, 0);
2943 tree arg1 = TREE_OPERAND (arg, 1);
2945 /* We need to check both for exact equality and tree equality. The
2946 former will be true if the operand has a side-effect. In that
2947 case, we know the operand occurred exactly once. */
2949 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2951 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2954 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2956 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2959 return fold_build2_loc (loc, code, type, arg0, arg1);
2967 /* Return a tree for the case when the result of an expression is RESULT
2968 converted to TYPE and OMITTED was previously an operand of the expression
2969 but is now not needed (e.g., we folded OMITTED * 0).
2971 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2972 the conversion of RESULT to TYPE. */
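/* For example, after folding X * 0 where X has side effects, the
   result is built as the COMPOUND_EXPR (X, 0), so that X is still
   evaluated, rather than as a bare 0.  */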
2975 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2977 tree t = fold_convert_loc (loc, type, result);
2979 /* If the resulting operand is an empty statement, just return the omitted
2980 statement cast to void. */
2981 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2982 return build1_loc (loc, NOP_EXPR, void_type_node,
2983 fold_ignored_result (omitted));
2985 if (TREE_SIDE_EFFECTS (omitted))
2986 return build2_loc (loc, COMPOUND_EXPR, type,
2987 fold_ignored_result (omitted), t);
2989 return non_lvalue_loc (loc, t);
2992 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2995 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2998 tree t = fold_convert_loc (loc, type, result);
3000 /* If the resulting operand is an empty statement, just return the omitted
3001 statement cast to void. */
3002 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3003 return build1_loc (loc, NOP_EXPR, void_type_node,
3004 fold_ignored_result (omitted));
3006 if (TREE_SIDE_EFFECTS (omitted))
3007 return build2_loc (loc, COMPOUND_EXPR, type,
3008 fold_ignored_result (omitted), t);
3010 return pedantic_non_lvalue_loc (loc, t);
3013 /* Return a tree for the case when the result of an expression is RESULT
3014 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3015 of the expression but are now not needed.
3017 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3018 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3019 evaluated before OMITTED2. Otherwise, if neither has side effects,
3020 just do the conversion of RESULT to TYPE. */
3023 omit_two_operands_loc (location_t loc, tree type, tree result,
3024 tree omitted1, tree omitted2)
3026 tree t = fold_convert_loc (loc, type, result);
3028 if (TREE_SIDE_EFFECTS (omitted2))
3029 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3030 if (TREE_SIDE_EFFECTS (omitted1))
3031 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3033 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3037 /* Return a simplified tree node for the truth-negation of ARG. This
3038 never alters ARG itself. We assume that ARG is an operation that
3039 returns a truth value (0 or 1).
3041 FIXME: one would think we would fold the result, but it causes
3042 problems with the dominator optimizer. */
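/* Typical results (a sketch, not an exhaustive list): !(a && b)
   becomes !a || !b, !(a < b) becomes a >= b when inverting the
   comparison is safe for the operand type, and !!a becomes a.  */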
3045 fold_truth_not_expr (location_t loc, tree arg)
3047 tree type = TREE_TYPE (arg);
3048 enum tree_code code = TREE_CODE (arg);
3049 location_t loc1, loc2;
3051 /* If this is a comparison, we can simply invert it, except for
3052 floating-point non-equality comparisons, in which case we just
3053 enclose a TRUTH_NOT_EXPR around what we have. */
3055 if (TREE_CODE_CLASS (code) == tcc_comparison)
3057 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3058 if (FLOAT_TYPE_P (op_type)
3059 && flag_trapping_math
3060 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3061 && code != NE_EXPR && code != EQ_EXPR)
3064 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3065 if (code == ERROR_MARK)
3068 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3069 TREE_OPERAND (arg, 1));
3075 return constant_boolean_node (integer_zerop (arg), type);
3077 case TRUTH_AND_EXPR:
3078 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3079 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3080 return build2_loc (loc, TRUTH_OR_EXPR, type,
3081 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3082 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3085 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3086 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3087 return build2_loc (loc, TRUTH_AND_EXPR, type,
3088 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3089 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3091 case TRUTH_XOR_EXPR:
3092 /* Here we can invert either operand. We invert the first operand
3093 unless the second operand is a TRUTH_NOT_EXPR in which case our
3094 result is the XOR of the first operand with the inside of the
3095 negation of the second operand. */
3097 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3098 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3099 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3101 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3102 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3103 TREE_OPERAND (arg, 1));
3105 case TRUTH_ANDIF_EXPR:
3106 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3107 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3108 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3109 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3110 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3112 case TRUTH_ORIF_EXPR:
3113 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3114 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3115 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3116 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3117 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3119 case TRUTH_NOT_EXPR:
3120 return TREE_OPERAND (arg, 0);
3124 tree arg1 = TREE_OPERAND (arg, 1);
3125 tree arg2 = TREE_OPERAND (arg, 2);
3127 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3128 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3130 /* A COND_EXPR may have a throw as one operand, which
3131 then has void type. Just leave void operands as they are. */
3133 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3134 VOID_TYPE_P (TREE_TYPE (arg1))
3135 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3136 VOID_TYPE_P (TREE_TYPE (arg2))
3137 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3141 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3142 return build2_loc (loc, COMPOUND_EXPR, type,
3143 TREE_OPERAND (arg, 0),
3144 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3146 case NON_LVALUE_EXPR:
3147 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3148 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3151 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3152 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3154 /* ... fall through ... */
3157 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3158 return build1_loc (loc, TREE_CODE (arg), type,
3159 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3162 if (!integer_onep (TREE_OPERAND (arg, 1)))
3164 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3167 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3169 case CLEANUP_POINT_EXPR:
3170 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3171 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3172 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3179 /* Return a simplified tree node for the truth-negation of ARG. This
3180 never alters ARG itself. We assume that ARG is an operation that
3181 returns a truth value (0 or 1).
3183 FIXME: one would think we would fold the result, but it causes
3184 problems with the dominator optimizer. */
3187 invert_truthvalue_loc (location_t loc, tree arg)
3191 if (TREE_CODE (arg) == ERROR_MARK)
3194 tem = fold_truth_not_expr (loc, arg);
3196 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3201 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3202 operands are another bit-wise operation with a common input. If so,
3203 distribute the bit operations to save an operation and possibly two if
3204 constants are involved. For example, convert
3205 (A | B) & (A | C) into A | (B & C)
3206 Further simplification will occur if B and C are constants.
3208 If this optimization cannot be done, 0 will be returned. */
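/* For instance, (x | 3) & (x | 5) distributes to x | (3 & 5), which
   then folds further to x | 1.  */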
3211 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3212 tree arg0, tree arg1)
3217 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3218 || TREE_CODE (arg0) == code
3219 || (TREE_CODE (arg0) != BIT_AND_EXPR
3220 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3223 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3225 common = TREE_OPERAND (arg0, 0);
3226 left = TREE_OPERAND (arg0, 1);
3227 right = TREE_OPERAND (arg1, 1);
3229 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3231 common = TREE_OPERAND (arg0, 0);
3232 left = TREE_OPERAND (arg0, 1);
3233 right = TREE_OPERAND (arg1, 0);
3235 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3237 common = TREE_OPERAND (arg0, 1);
3238 left = TREE_OPERAND (arg0, 0);
3239 right = TREE_OPERAND (arg1, 1);
3241 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3243 common = TREE_OPERAND (arg0, 1);
3244 left = TREE_OPERAND (arg0, 0);
3245 right = TREE_OPERAND (arg1, 0);
3250 common = fold_convert_loc (loc, type, common);
3251 left = fold_convert_loc (loc, type, left);
3252 right = fold_convert_loc (loc, type, right);
3253 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3254 fold_build2_loc (loc, code, type, left, right));
3257 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3258 with code CODE. This optimization is unsafe. */
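/* (It is unsafe because, e.g., rewriting A/C + B/C as (A + B)/C
   changes where intermediate rounding happens and can alter the IEEE
   result; callers are therefore expected to guard it, typically
   behind flag_unsafe_math_optimizations.)  */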
3260 distribute_real_division (location_t loc, enum tree_code code, tree type,
3261 tree arg0, tree arg1)
3263 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3264 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3266 /* (A / C) +- (B / C) -> (A +- B) / C. */
3268 && operand_equal_p (TREE_OPERAND (arg0, 1),
3269 TREE_OPERAND (arg1, 1), 0))
3270 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3271 fold_build2_loc (loc, code, type,
3272 TREE_OPERAND (arg0, 0),
3273 TREE_OPERAND (arg1, 0)),
3274 TREE_OPERAND (arg0, 1));
3276 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3277 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3278 TREE_OPERAND (arg1, 0), 0)
3279 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3280 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3282 REAL_VALUE_TYPE r0, r1;
3283 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3284 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3286 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3288 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3289 real_arithmetic (&r0, code, &r0, &r1);
3290 return fold_build2_loc (loc, MULT_EXPR, type,
3291 TREE_OPERAND (arg0, 0),
3292 build_real (type, r0));
3298 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3299 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3302 make_bit_field_ref (location_t loc, tree inner, tree type,
3303 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3305 tree result, bftype;
3309 tree size = TYPE_SIZE (TREE_TYPE (inner));
3310 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3311 || POINTER_TYPE_P (TREE_TYPE (inner)))
3312 && host_integerp (size, 0)
3313 && tree_low_cst (size, 0) == bitsize)
3314 return fold_convert_loc (loc, type, inner);
3318 if (TYPE_PRECISION (bftype) != bitsize
3319 || TYPE_UNSIGNED (bftype) == !unsignedp)
3320 bftype = build_nonstandard_integer_type (bitsize, 0);
3322 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3323 size_int (bitsize), bitsize_int (bitpos));
3326 result = fold_convert_loc (loc, type, result);
3331 /* Optimize a bit-field compare.
3333 There are two cases: First is a compare against a constant and the
3334 second is a comparison of two items where the fields are at the same
3335 bit position relative to the start of a chunk (byte, halfword, word)
3336 large enough to contain it. In these cases we can avoid the shift
3337 implicit in bitfield extractions.
3339 For constants, we emit a compare of the shifted constant with the
3340 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3341 compared. For two fields at the same position, we do the ANDs with the
3342 similar mask and compare the result of the ANDs.
3344 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3345 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3346 are the left and right operands of the comparison, respectively.
3348 If the optimization described above can be done, we return the resulting
3349 tree. Otherwise we return zero. */
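/* A sketch of the constant case: for struct { unsigned f : 4; } s,
   the test s.f == 3 can be rewritten as (WORD & MASK) == (3 << BITPOS),
   where WORD is a mode-sized load covering the field, avoiding the
   shift a plain bit-field extraction would require.  */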
3352 optimize_bit_field_compare (location_t loc, enum tree_code code,
3353 tree compare_type, tree lhs, tree rhs)
3355 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3356 tree type = TREE_TYPE (lhs);
3357 tree signed_type, unsigned_type;
3358 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3359 enum machine_mode lmode, rmode, nmode;
3360 int lunsignedp, runsignedp;
3361 int lvolatilep = 0, rvolatilep = 0;
3362 tree linner, rinner = NULL_TREE;
3366 /* Get all the information about the extractions being done. If the bit size
3367 is the same as the size of the underlying object, we aren't doing an
3368 extraction at all and so can do nothing. We also don't want to
3369 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3370 then will no longer be able to replace it. */
3371 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3372 &lunsignedp, &lvolatilep, false);
3373 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3374 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3379 /* If this is not a constant, we can only do something if bit positions,
3380 sizes, and signedness are the same. */
3381 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3382 &runsignedp, &rvolatilep, false);
3384 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3385 || lunsignedp != runsignedp || offset != 0
3386 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3390 /* See if we can find a mode to refer to this field. We should be able to,
3391 but fail if we can't. */
3393 && GET_MODE_BITSIZE (lmode) > 0
3394 && flag_strict_volatile_bitfields > 0)
3397 nmode = get_best_mode (lbitsize, lbitpos,
3398 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3399 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3400 TYPE_ALIGN (TREE_TYPE (rinner))),
3401 word_mode, lvolatilep || rvolatilep);
3402 if (nmode == VOIDmode)
3405 /* Set signed and unsigned types of the precision of this mode for the
3407 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3408 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3410 /* Compute the bit position and size for the new reference and our offset
3411 within it. If the new reference is the same size as the original, we
3412 won't optimize anything, so return zero. */
3413 nbitsize = GET_MODE_BITSIZE (nmode);
3414 nbitpos = lbitpos & ~ (nbitsize - 1);
3416 if (nbitsize == lbitsize)
3419 if (BYTES_BIG_ENDIAN)
3420 lbitpos = nbitsize - lbitsize - lbitpos;
3422 /* Make the mask to be used against the extracted field. */
3423 mask = build_int_cst_type (unsigned_type, -1);
3424 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3425 mask = const_binop (RSHIFT_EXPR, mask,
3426 size_int (nbitsize - lbitsize - lbitpos));
3429 /* If not comparing with constant, just rework the comparison
3431 return fold_build2_loc (loc, code, compare_type,
3432 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3433 make_bit_field_ref (loc, linner,
3438 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3439 make_bit_field_ref (loc, rinner,
3445 /* Otherwise, we are handling the constant case. See if the constant is too
3446 big for the field. Warn and return a tree for 0 (false) if so. We do
3447 this not only for its own sake, but to avoid having to test for this
3448 error case below. If we didn't, we might generate wrong code.
3450 For unsigned fields, the constant shifted right by the field length should
3451 be all zero. For signed fields, the high-order bits should agree with the sign bit. */
3456 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3457 fold_convert_loc (loc,
3458 unsigned_type, rhs),
3459 size_int (lbitsize))))
3461 warning (0, "comparison is always %d due to width of bit-field",
3463 return constant_boolean_node (code == NE_EXPR, compare_type);
3468 tree tem = const_binop (RSHIFT_EXPR,
3469 fold_convert_loc (loc, signed_type, rhs),
3470 size_int (lbitsize - 1));
3471 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3473 warning (0, "comparison is always %d due to width of bit-field",
3475 return constant_boolean_node (code == NE_EXPR, compare_type);
3479 /* Single-bit compares should always be against zero. */
3480 if (lbitsize == 1 && ! integer_zerop (rhs))
3482 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3483 rhs = build_int_cst (type, 0);
3486 /* Make a new bitfield reference, shift the constant over the
3487 appropriate number of bits and mask it with the computed mask
3488 (in case this was a signed field). If we changed it, make a new one. */
3489 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3492 TREE_SIDE_EFFECTS (lhs) = 1;
3493 TREE_THIS_VOLATILE (lhs) = 1;
3496 rhs = const_binop (BIT_AND_EXPR,
3497 const_binop (LSHIFT_EXPR,
3498 fold_convert_loc (loc, unsigned_type, rhs),
3499 size_int (lbitpos)),
3502 lhs = build2_loc (loc, code, compare_type,
3503 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3507 /* Subroutine for fold_truthop: decode a field reference.
3509 If EXP is a comparison reference, we return the innermost reference.
3511 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3512 set to the starting bit number.
3514 If the innermost field can be completely contained in a mode-sized
3515 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3517 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3518 otherwise it is not changed.
3520 *PUNSIGNEDP is set to the signedness of the field.
3522 *PMASK is set to the mask used. This is either contained in a
3523 BIT_AND_EXPR or derived from the width of the field.
3525 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3527 Return 0 if this is not a component reference or is one that we can't
3528 do anything with. */
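/* An illustrative example: for EXP = s.f & 5, with F an unsigned
   4-bit field, the innermost reference to S is returned, *PBITSIZE is
   4, *PAND_MASK is 5, and *PMASK is the field mask 0xf merged with 5,
   i.e. 5.  */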
3531 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3532 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3533 int *punsignedp, int *pvolatilep,
3534 tree *pmask, tree *pand_mask)
3536 tree outer_type = 0;
3538 tree mask, inner, offset;
3540 unsigned int precision;
3542 /* All the optimizations using this function assume integer fields.
3543 There are problems with FP fields since the type_for_size call
3544 below can fail for, e.g., XFmode. */
3545 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3548 /* We are interested in the bare arrangement of bits, so strip everything
3549 that doesn't affect the machine mode. However, record the type of the
3550 outermost expression if it may matter below. */
3551 if (CONVERT_EXPR_P (exp)
3552 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3553 outer_type = TREE_TYPE (exp);
3556 if (TREE_CODE (exp) == BIT_AND_EXPR)
3558 and_mask = TREE_OPERAND (exp, 1);
3559 exp = TREE_OPERAND (exp, 0);
3560 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3561 if (TREE_CODE (and_mask) != INTEGER_CST)
3565 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3566 punsignedp, pvolatilep, false);
3567 if ((inner == exp && and_mask == 0)
3568 || *pbitsize < 0 || offset != 0
3569 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3572 /* If the number of bits in the reference is the same as the bitsize of
3573 the outer type, then the outer type gives the signedness. Otherwise
3574 (in case of a small bitfield) the signedness is unchanged. */
3575 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3576 *punsignedp = TYPE_UNSIGNED (outer_type);
3578 /* Compute the mask to access the bitfield. */
3579 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3580 precision = TYPE_PRECISION (unsigned_type);
3582 mask = build_int_cst_type (unsigned_type, -1);
3584 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3585 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3587 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3589 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3590 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3593 *pand_mask = and_mask;
3597 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order bits. */
3601 all_ones_mask_p (const_tree mask, int size)
3603 tree type = TREE_TYPE (mask);
3604 unsigned int precision = TYPE_PRECISION (type);
3607 tmask = build_int_cst_type (signed_type_for (type), -1);
3610 tree_int_cst_equal (mask,
3611 const_binop (RSHIFT_EXPR,
3612 const_binop (LSHIFT_EXPR, tmask,
3613 size_int (precision - size)),
3614 size_int (precision - size)));
3617 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3618 represents the sign bit of EXP's type. If EXP represents a sign
3619 or zero extension, also test VAL against the unextended type.
3620 The return value is the (sub)expression whose sign bit is VAL,
3621 or NULL_TREE otherwise. */
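/* For example, if EXP has a 32-bit integral type, VAL matches when
   only bit 31 is set; if EXP is a NOP_EXPR extending a narrower
   operand, the sign bit of the narrower type is tried as well.  */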
3624 sign_bit_p (tree exp, const_tree val)
3626 unsigned HOST_WIDE_INT mask_lo, lo;
3627 HOST_WIDE_INT mask_hi, hi;
3631 /* Tree EXP must have an integral type. */
3632 t = TREE_TYPE (exp);
3633 if (! INTEGRAL_TYPE_P (t))
3636 /* Tree VAL must be an integer constant. */
3637 if (TREE_CODE (val) != INTEGER_CST
3638 || TREE_OVERFLOW (val))
3641 width = TYPE_PRECISION (t);
3642 if (width > HOST_BITS_PER_WIDE_INT)
3644 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3647 mask_hi = ((unsigned HOST_WIDE_INT) -1
3648 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3654 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3657 mask_lo = ((unsigned HOST_WIDE_INT) -1
3658 >> (HOST_BITS_PER_WIDE_INT - width));
3661 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3662 treat VAL as if it were unsigned. */
3663 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3664 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3667 /* Handle extension from a narrower type. */
3668 if (TREE_CODE (exp) == NOP_EXPR
3669 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3670 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3675 /* Subroutine for fold_truthop: determine if an operand is simple enough
3676 to be evaluated unconditionally. */
3679 simple_operand_p (const_tree exp)
3681 /* Strip any conversions that don't change the machine mode. */
3684 return (CONSTANT_CLASS_P (exp)
3685 || TREE_CODE (exp) == SSA_NAME
3687 && ! TREE_ADDRESSABLE (exp)
3688 && ! TREE_THIS_VOLATILE (exp)
3689 && ! DECL_NONLOCAL (exp)
3690 /* Don't regard global variables as simple. They may be
3691 allocated in ways unknown to the compiler (shared memory,
3692 #pragma weak, etc). */
3693 && ! TREE_PUBLIC (exp)
3694 && ! DECL_EXTERNAL (exp)
3695 /* Loading a static variable is unduly expensive, but global
3696 registers aren't expensive. */
3697 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3700 /* The following functions are subroutines to fold_range_test and allow it to
3701 try to change a logical combination of comparisons into a range test.
3704 X == 2 || X == 3 || X == 4 || X == 5
3708 (unsigned) (X - 2) <= 3
3710 We describe each set of comparisons as being either inside or outside
3711 a range, using a variable named like IN_P, and then describe the
3712 range with a lower and upper bound. If one of the bounds is omitted,
3713 it represents either the highest or lowest value of the type.
3715 In the comments below, we represent a range by two numbers in brackets
3716 preceded by a "+" to designate being inside that range, or a "-" to
3717 designate being outside that range, so the condition can be inverted by
3718 flipping the prefix. An omitted bound is represented by a "-". For
3719 example, "- [-, 10]" means being outside the range starting at the lowest
3720 possible value and ending at 10, in other words, being greater than 10.
3721 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
3724 We set up things so that the missing bounds are handled in a consistent
3725 manner so neither a missing bound nor "true" and "false" need to be
3726 handled using a special case. */
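/* In this notation, the example above tests "+ [2, 5]" on X, and the
   inverse test X < 2 || X > 5 is "- [2, 5]".  */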
3728 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3729 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3730 and UPPER1_P are nonzero if the respective argument is an upper bound
3731 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3732 must be specified for a comparison. ARG1 will be converted to ARG0's
3733 type if both are specified. */
3736 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3737 tree arg1, int upper1_p)
3743 /* If neither arg represents infinity, do the normal operation.
3744 Else, if not a comparison, return infinity. Else handle the special
3745 comparison rules. Note that most of the cases below won't occur, but
3746 are handled for consistency. */
3748 if (arg0 != 0 && arg1 != 0)
3750 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3751 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3753 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3756 if (TREE_CODE_CLASS (code) != tcc_comparison)
3759 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3760 for neither. In real maths, we cannot assume open ended ranges are
3761 the same. But, this is computer arithmetic, where numbers are finite.
3762 We can therefore make the transformation of any unbounded range with
3763 the value Z, Z being greater than any representable number. This permits
3764 us to treat unbounded ranges as equal. */
3765 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3766 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3770 result = sgn0 == sgn1;
3773 result = sgn0 != sgn1;
3776 result = sgn0 < sgn1;
3779 result = sgn0 <= sgn1;
3782 result = sgn0 > sgn1;
3785 result = sgn0 >= sgn1;
3791 return constant_boolean_node (result, type);
3794 /* Given EXP, a logical expression, set the range it is testing into
3795 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3796 actually being tested. *PLOW and *PHIGH will be made of the same
3797 type as the returned expression. If EXP is not a comparison, we
3798 will most likely not be returning a useful value and range. Set
3799 *STRICT_OVERFLOW_P to true if the return value is only valid
3800 because signed overflow is undefined; otherwise, do not change
3801 *STRICT_OVERFLOW_P. */
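/* For example (a sketch): for EXP = x > 5 on an integer X, we return
   X with *PIN_P = 0, *PLOW = 0 (a missing bound) and *PHIGH = 5,
   i.e. the range "- [-, 5]".  */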
3804 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3805 bool *strict_overflow_p)
3807 enum tree_code code;
3808 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3809 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3811 tree low, high, n_low, n_high;
3812 location_t loc = EXPR_LOCATION (exp);
3814 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3815 and see if we can refine the range. Some of the cases below may not
3816 happen, but it doesn't seem worth worrying about this. We "continue"
3817 the outer loop when we've changed something; otherwise we "break"
3818 the switch, which will "break" the while. */
3821 low = high = build_int_cst (TREE_TYPE (exp), 0);
3825 code = TREE_CODE (exp);
3826 exp_type = TREE_TYPE (exp);
3828 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3830 if (TREE_OPERAND_LENGTH (exp) > 0)
3831 arg0 = TREE_OPERAND (exp, 0);
3832 if (TREE_CODE_CLASS (code) == tcc_comparison
3833 || TREE_CODE_CLASS (code) == tcc_unary
3834 || TREE_CODE_CLASS (code) == tcc_binary)
3835 arg0_type = TREE_TYPE (arg0);
3836 if (TREE_CODE_CLASS (code) == tcc_binary
3837 || TREE_CODE_CLASS (code) == tcc_comparison
3838 || (TREE_CODE_CLASS (code) == tcc_expression
3839 && TREE_OPERAND_LENGTH (exp) > 1))
3840 arg1 = TREE_OPERAND (exp, 1);
3845 case TRUTH_NOT_EXPR:
3846 in_p = ! in_p, exp = arg0;
3849 case EQ_EXPR: case NE_EXPR:
3850 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3851 /* We can only do something if the range is testing for zero
3852 and if the second operand is an integer constant. Note that
3853 saying something is "in" the range we make is done by
3854 complementing IN_P, since the initial case of being not equal
3855 to zero sets it; "out" is leaving it alone.
3856 if (low == 0 || high == 0
3857 || ! integer_zerop (low) || ! integer_zerop (high)
3858 || TREE_CODE (arg1) != INTEGER_CST)
3863 case NE_EXPR: /* - [c, c] */
3866 case EQ_EXPR: /* + [c, c] */
3867 in_p = ! in_p, low = high = arg1;
3869 case GT_EXPR: /* - [-, c] */
3870 low = 0, high = arg1;
3872 case GE_EXPR: /* + [c, -] */
3873 in_p = ! in_p, low = arg1, high = 0;
3875 case LT_EXPR: /* - [c, -] */
3876 low = arg1, high = 0;
3878 case LE_EXPR: /* + [-, c] */
3879 in_p = ! in_p, low = 0, high = arg1;
3885 /* If this is an unsigned comparison, we also know that EXP is
3886 greater than or equal to zero. We base the range tests we make
3887 on that fact, so we record it here so we can parse existing
3888 range tests. We test arg0_type since often the return type
3889 of, e.g. EQ_EXPR, is boolean. */
3890 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3892 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3894 build_int_cst (arg0_type, 0),
3898 in_p = n_in_p, low = n_low, high = n_high;
3900 /* If the high bound is missing, but we have a nonzero low
3901 bound, reverse the range so it goes from zero to the low bound minus 1. */
3903 if (high == 0 && low && ! integer_zerop (low))
3906 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3907 integer_one_node, 0);
3908 low = build_int_cst (arg0_type, 0);
3916 /* (-x) IN [a,b] -> x in [-b, -a] */
3917 n_low = range_binop (MINUS_EXPR, exp_type,
3918 build_int_cst (exp_type, 0),
3920 n_high = range_binop (MINUS_EXPR, exp_type,
3921 build_int_cst (exp_type, 0),
3923 if (n_high != 0 && TREE_OVERFLOW (n_high))
3929 exp = build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3930 build_int_cst (exp_type, 1));
3933 case PLUS_EXPR: case MINUS_EXPR:
3934 if (TREE_CODE (arg1) != INTEGER_CST)
3937 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3938 move a constant to the other side. */
3939 if (!TYPE_UNSIGNED (arg0_type)
3940 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3943 /* If EXP is signed, any overflow in the computation is undefined,
3944 so we don't worry about it so long as our computations on
3945 the bounds don't overflow. For unsigned, overflow is defined
3946 and this is exactly the right thing. */
3947 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3948 arg0_type, low, 0, arg1, 0);
3949 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3950 arg0_type, high, 1, arg1, 0);
3951 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3952 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3955 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3956 *strict_overflow_p = true;
3959 /* Check for an unsigned range which has wrapped around the maximum
3960 value thus making n_high < n_low, and normalize it. */
3961 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3963 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3964 integer_one_node, 0);
3965 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3966 integer_one_node, 0);
3968 /* If the range is of the form +/- [ x+1, x ], we won't
3969 be able to normalize it. But then, it represents the
3970 whole range or the empty set, so make it +/- [ -, - ]. */
3972 if (tree_int_cst_equal (n_low, low)
3973 && tree_int_cst_equal (n_high, high))
3979 low = n_low, high = n_high;
3984 CASE_CONVERT: case NON_LVALUE_EXPR:
3985 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3988 if (! INTEGRAL_TYPE_P (arg0_type)
3989 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3990 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3993 n_low = low, n_high = high;
3996 n_low = fold_convert_loc (loc, arg0_type, n_low);
3999 n_high = fold_convert_loc (loc, arg0_type, n_high);
4002 /* If we're converting arg0 from an unsigned type, to exp,
4003 a signed type, we will be doing the comparison as unsigned.
4004 The tests above have already verified that LOW and HIGH are both positive.
4007 So we have to ensure that we will handle large unsigned
4008 values the same way that the current signed bounds treat negative values. */
4011 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4015 /* For fixed-point modes, we need to pass the saturating flag
4016 as the 2nd parameter. */
4017 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4018 equiv_type = lang_hooks.types.type_for_mode
4019 (TYPE_MODE (arg0_type),
4020 TYPE_SATURATING (arg0_type));
4022 equiv_type = lang_hooks.types.type_for_mode
4023 (TYPE_MODE (arg0_type), 1);
4025 /* A range without an upper bound is, naturally, unbounded.
4026 Since convert would have cropped a very large value, use
4027 the max value for the destination type. */
4029 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4030 : TYPE_MAX_VALUE (arg0_type);
4032 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4033 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4034 fold_convert_loc (loc, arg0_type,
4036 build_int_cst (arg0_type, 1));
4038 /* If the low bound is specified, "and" the range with the
4039 range for which the original unsigned value will be positive. */
4043 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4044 1, n_low, n_high, 1,
4045 fold_convert_loc (loc, arg0_type,
4050 in_p = (n_in_p == in_p);
4054 /* Otherwise, "or" the range with the range of the input
4055 that will be interpreted as negative. */
4056 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4057 0, n_low, n_high, 1,
4058 fold_convert_loc (loc, arg0_type,
4063 in_p = (in_p != n_in_p);
4068 low = n_low, high = n_high;
4078 /* If EXP is a constant, we can evaluate whether this is true or false. */
4079 if (TREE_CODE (exp) == INTEGER_CST)
4081 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4083 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4089 *pin_p = in_p, *plow = low, *phigh = high;
4093 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4094 type, TYPE, return an expression to test if EXP is in (or out of, depending
4095 on IN_P) the range. Return 0 if the test couldn't be created. */
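/* For example, with IN_P nonzero, LOW = 2 and HIGH = 5 on an int EXP,
   the check is built as (unsigned) (EXP - 2) <= 3, per the range-test
   scheme described earlier.  */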
4098 build_range_check (location_t loc, tree type, tree exp, int in_p,
4099 tree low, tree high)
4101 tree etype = TREE_TYPE (exp), value;
4103 #ifdef HAVE_canonicalize_funcptr_for_compare
4104 /* Disable this optimization for function pointer expressions
4105 on targets that require function pointer canonicalization. */
4106 if (HAVE_canonicalize_funcptr_for_compare
4107 && TREE_CODE (etype) == POINTER_TYPE
4108 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4114 value = build_range_check (loc, type, exp, 1, low, high);
4116 return invert_truthvalue_loc (loc, value);
4121 if (low == 0 && high == 0)
4122 return build_int_cst (type, 1);
4125 return fold_build2_loc (loc, LE_EXPR, type, exp,
4126 fold_convert_loc (loc, etype, high));
4129 return fold_build2_loc (loc, GE_EXPR, type, exp,
4130 fold_convert_loc (loc, etype, low));
4132 if (operand_equal_p (low, high, 0))
4133 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4134 fold_convert_loc (loc, etype, low));
4136 if (integer_zerop (low))
4138 if (! TYPE_UNSIGNED (etype))
4140 etype = unsigned_type_for (etype);
4141 high = fold_convert_loc (loc, etype, high);
4142 exp = fold_convert_loc (loc, etype, exp);
4144 return build_range_check (loc, type, exp, 1, 0, high);
4147 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4148 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4150 unsigned HOST_WIDE_INT lo;
4154 prec = TYPE_PRECISION (etype);
4155 if (prec <= HOST_BITS_PER_WIDE_INT)
4158 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4162 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4163 lo = (unsigned HOST_WIDE_INT) -1;
4166 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4168 if (TYPE_UNSIGNED (etype))
4170 tree signed_etype = signed_type_for (etype);
4171 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4173 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4175 etype = signed_etype;
4176 exp = fold_convert_loc (loc, etype, exp);
4178 return fold_build2_loc (loc, GT_EXPR, type, exp,
4179 build_int_cst (etype, 0));
4183 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4184 This requires wrap-around arithmetic for the type of the expression.
4185 First make sure that arithmetic in this type is valid, then make sure
4186 that it wraps around. */
4187 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4188 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4189 TYPE_UNSIGNED (etype));
4191 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4193 tree utype, minv, maxv;
4195 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4196 for the type in question, as we rely on this here. */
4197 utype = unsigned_type_for (etype);
4198 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4199 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4200 integer_one_node, 1);
4201 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4203 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4210 high = fold_convert_loc (loc, etype, high);
4211 low = fold_convert_loc (loc, etype, low);
4212 exp = fold_convert_loc (loc, etype, exp);
4214 value = const_binop (MINUS_EXPR, high, low);
4217 if (POINTER_TYPE_P (etype))
4219 if (value != 0 && !TREE_OVERFLOW (value))
4221 low = fold_convert_loc (loc, sizetype, low);
4222 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4223 return build_range_check (loc, type,
4224 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4226 1, build_int_cst (etype, 0), value);
4231 if (value != 0 && !TREE_OVERFLOW (value))
4232 return build_range_check (loc, type,
4233 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4234 1, build_int_cst (etype, 0), value);
4239 /* Return the predecessor of VAL in its type, handling the infinite case. */
4242 range_predecessor (tree val)
4244 tree type = TREE_TYPE (val);
4246 if (INTEGRAL_TYPE_P (type)
4247 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4250 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4253 /* Return the successor of VAL in its type, handling the infinite case. */
4256 range_successor (tree val)
4258 tree type = TREE_TYPE (val);
4260 if (INTEGRAL_TYPE_P (type)
4261 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4264 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4267 /* Given two ranges, see if we can merge them into one. Return 1 if we
4268 can, 0 if we can't. Set the output range into the specified parameters. */
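/* Reading the cases below as intersecting the two tests: merging
   "+ [2, 5]" with "+ [4, 9]" yields "+ [4, 5]", while merging
   "+ [2, 3]" with "+ [6, 9]" yields "- [-, -]", i.e. always false.  */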
4271 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4272 tree high0, int in1_p, tree low1, tree high1)
4280 int lowequal = ((low0 == 0 && low1 == 0)
4281 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4282 low0, 0, low1, 0)));
4283 int highequal = ((high0 == 0 && high1 == 0)
4284 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4285 high0, 1, high1, 1)));
4287 /* Make range 0 be the range that starts first, or ends last if they
4288 start at the same value. Swap them if it isn't. */
4289 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4292 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4293 high1, 1, high0, 1))))
4295 temp = in0_p, in0_p = in1_p, in1_p = temp;
4296 tem = low0, low0 = low1, low1 = tem;
4297 tem = high0, high0 = high1, high1 = tem;
4300 /* Now flag two cases, whether the ranges are disjoint or whether the
4301 second range is totally subsumed in the first. Note that the tests
4302 below are simplified by the ones above. */
4303 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4304 high0, 1, low1, 0));
4305 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4306 high1, 1, high0, 1));
4308 /* We now have four cases, depending on whether we are including or
4309 excluding the two ranges. */
4312 /* If they don't overlap, the result is false. If the second range
4313 is a subset it is the result. Otherwise, the range is from the start
4314 of the second to the end of the first. */
4316 in_p = 0, low = high = 0;
4318 in_p = 1, low = low1, high = high1;
4320 in_p = 1, low = low1, high = high0;
4323 else if (in0_p && ! in1_p)
4325 /* If they don't overlap, the result is the first range. If they are
4326 equal, the result is false. If the second range is a subset of the
4327 first, and the ranges begin at the same place, we go from just after
4328 the end of the second range to the end of the first. If the second
4329 range is not a subset of the first, or if it is a subset and both
4330 ranges end at the same place, the range starts at the start of the
4331 first range and ends just before the second range.
4332 Otherwise, we can't describe this as a single range. */
4334 in_p = 1, low = low0, high = high0;
4335 else if (lowequal && highequal)
4336 in_p = 0, low = high = 0;
4337 else if (subset && lowequal)
4339 low = range_successor (high1);
4344 /* We are in the weird situation where high0 > high1 but
4345 high1 has no successor. Punt. */
4349 else if (! subset || highequal)
4352 high = range_predecessor (low1);
4356 /* low0 < low1 but low1 has no predecessor. Punt. */
4364 else if (! in0_p && in1_p)
4366 /* If they don't overlap, the result is the second range. If the second
4367 is a subset of the first, the result is false. Otherwise,
4368 the range starts just after the first range and ends at the
4369 end of the second. */
4371 in_p = 1, low = low1, high = high1;
4372 else if (subset || highequal)
4373 in_p = 0, low = high = 0;
4376 low = range_successor (high0);
4381 /* high1 > high0 but high0 has no successor. Punt. */
4389 /* The case where we are excluding both ranges. Here the complex case
4390 is if they don't overlap. In that case, the only time we have a
4391 range is if they are adjacent. If the second is a subset of the
4392 first, the result is the first. Otherwise, the range to exclude
4393 starts at the beginning of the first range and ends at the end of the second. */
4397 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4398 range_successor (high0),
4400 in_p = 0, low = low0, high = high1;
4403 /* Canonicalize - [min, x] into - [-, x]. */
4404 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4405 switch (TREE_CODE (TREE_TYPE (low0)))
4408 if (TYPE_PRECISION (TREE_TYPE (low0))
4409 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4413 if (tree_int_cst_equal (low0,
4414 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4418 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4419 && integer_zerop (low0))
4426 /* Canonicalize - [x, max] into - [x, -]. */
4427 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4428 switch (TREE_CODE (TREE_TYPE (high1)))
4431 if (TYPE_PRECISION (TREE_TYPE (high1))
4432 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4436 if (tree_int_cst_equal (high1,
4437 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4441 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4442 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4444 integer_one_node, 1)))
4451 /* The ranges might be also adjacent between the maximum and
4452 minimum values of the given type. For
4453 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4454 return + [x + 1, y - 1]. */
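/* Illustrative instance: for 32-bit int, combining the exclusions
   - [-, 3] and - [7, -] leaves exactly the values 4, 5 and 6, i.e.
   the inclusive range + [4, 6], matching x + 1 and y - 1 above.  */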
4455 if (low0 == 0 && high1 == 0)
4457 low = range_successor (high0);
4458 high = range_predecessor (low1);
4459 if (low == 0 || high == 0)
4469 in_p = 0, low = low0, high = high0;
4471 in_p = 0, low = low0, high = high1;
4474 *pin_p = in_p, *plow = low, *phigh = high;
4479 /* Subroutine of fold, looking inside expressions of the form
4480 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4481 of the COND_EXPR. This function is being used also to optimize
4482 A op B ? C : A, by reversing the comparison first.
4484 Return a folded expression whose code is not a COND_EXPR
4485 anymore, or NULL_TREE if no folding opportunity is found. */
4488 fold_cond_expr_with_comparison (location_t loc, tree type,
4489 tree arg0, tree arg1, tree arg2)
4491 enum tree_code comp_code = TREE_CODE (arg0);
4492 tree arg00 = TREE_OPERAND (arg0, 0);
4493 tree arg01 = TREE_OPERAND (arg0, 1);
4494 tree arg1_type = TREE_TYPE (arg1);
4500 /* If we have A op 0 ? A : -A, consider applying the following transformations:
4503 A == 0? A : -A same as -A
4504 A != 0? A : -A same as A
4505 A >= 0? A : -A same as abs (A)
4506 A > 0? A : -A same as abs (A)
4507 A <= 0? A : -A same as -abs (A)
4508 A < 0? A : -A same as -abs (A)
4510 None of these transformations work for modes with signed
4511 zeros. If A is +/-0, the first two transformations will
4512 change the sign of the result (from +0 to -0, or vice
4513 versa). The last four will fix the sign of the result,
4514 even though the original expressions could be positive or
4515 negative, depending on the sign of A.
4517 Note that all these transformations are correct if A is
4518 NaN, since the two alternatives (A and -A) are also NaNs. */
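/* A minimal source-level sketch of the ABS cases, assuming signed
   zeros need not be honored for the mode (e.g. -fno-signed-zeros):

       double f (double a) { return a > 0 ? a : -a; }

   may be folded as if it were fabs (a), and  a <= 0 ? a : -a  as
   -fabs (a); both remain correct for NaN operands, as noted.  */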
4519 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4520 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4521 ? real_zerop (arg01)
4522 : integer_zerop (arg01))
4523 && ((TREE_CODE (arg2) == NEGATE_EXPR
4524 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4525 /* In the case that A is of the form X-Y, '-A' (arg2) may
4526 have already been folded to Y-X, check for that. */
4527 || (TREE_CODE (arg1) == MINUS_EXPR
4528 && TREE_CODE (arg2) == MINUS_EXPR
4529 && operand_equal_p (TREE_OPERAND (arg1, 0),
4530 TREE_OPERAND (arg2, 1), 0)
4531 && operand_equal_p (TREE_OPERAND (arg1, 1),
4532 TREE_OPERAND (arg2, 0), 0))))
4537 tem = fold_convert_loc (loc, arg1_type, arg1);
4538 return pedantic_non_lvalue_loc (loc,
4539 fold_convert_loc (loc, type,
4540 negate_expr (tem)));
4543 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4546 if (flag_trapping_math)
4551 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4552 arg1 = fold_convert_loc (loc, signed_type_for
4553 (TREE_TYPE (arg1)), arg1);
4554 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4555 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4558 if (flag_trapping_math)
4562 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4563 arg1 = fold_convert_loc (loc, signed_type_for
4564 (TREE_TYPE (arg1)), arg1);
4565 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4566 return negate_expr (fold_convert_loc (loc, type, tem));
4568 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4572 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4573 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4574 both transformations are correct when A is NaN: A != 0
4575 is then true, and A == 0 is false. */
4577 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4578 && integer_zerop (arg01) && integer_zerop (arg2))
4580 if (comp_code == NE_EXPR)
4581 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4582 else if (comp_code == EQ_EXPR)
4583 return build_int_cst (type, 0);
4586 /* Try some transformations of A op B ? A : B.
4588 A == B? A : B same as B
4589 A != B? A : B same as A
4590 A >= B? A : B same as max (A, B)
4591 A > B? A : B same as max (B, A)
4592 A <= B? A : B same as min (A, B)
4593 A < B? A : B same as min (B, A)
4595 As above, these transformations don't work in the presence
4596 of signed zeros. For example, if A and B are zeros of
4597 opposite sign, the first two transformations will change
4598 the sign of the result. In the last four, the original
4599 expressions give different results for (A=+0, B=-0) and
4600 (A=-0, B=+0), but the transformed expressions do not.
4602 The first two transformations are correct if either A or B
4603 is a NaN. In the first transformation, the condition will
4604 be false, and B will indeed be chosen. In the case of the
4605 second transformation, the condition A != B will be true,
4606 and A will be chosen.
4608 The conversions to max() and min() are not correct if B is
4609 a number and A is not. The conditions in the original
4610 expressions will be false, so all four give B. The min()
4611 and max() versions would give a NaN instead. */
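/* Illustrative instance, assuming neither NaNs nor signed zeros need
   honoring for the mode:

       a < b ? a : b    folds to MIN_EXPR <b, a>
       a > b ? a : b    folds to MAX_EXPR <b, a>

   with the operand order chosen as in the table above, so the value
   selected on equality is preserved.  */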
4612 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4613 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4614 /* Avoid these transformations if the COND_EXPR may be used
4615 as an lvalue in the C++ front-end. PR c++/19199. */
4617 || (strcmp (lang_hooks.name, "GNU C++") != 0
4618 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4619 || ! maybe_lvalue_p (arg1)
4620 || ! maybe_lvalue_p (arg2)))
4622 tree comp_op0 = arg00;
4623 tree comp_op1 = arg01;
4624 tree comp_type = TREE_TYPE (comp_op0);
4626 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4627 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4637 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4639 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4644 /* In C++ a ?: expression can be an lvalue, so put the
4645 operand which will be used if they are equal first
4646 so that we can convert this back to the
4647 corresponding COND_EXPR. */
4648 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4650 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4651 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4652 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4653 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4654 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4655 comp_op1, comp_op0);
4656 return pedantic_non_lvalue_loc (loc,
4657 fold_convert_loc (loc, type, tem));
4664 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4666 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4667 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4668 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4669 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4670 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4671 comp_op1, comp_op0);
4672 return pedantic_non_lvalue_loc (loc,
4673 fold_convert_loc (loc, type, tem));
4677 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4678 return pedantic_non_lvalue_loc (loc,
4679 fold_convert_loc (loc, type, arg2));
4682 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4683 return pedantic_non_lvalue_loc (loc,
4684 fold_convert_loc (loc, type, arg1));
4687 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4692 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4693 we might still be able to simplify this. For example,
4694 if C1 is one less or one more than C2, this might have started
4695 out as a MIN or MAX and been transformed by this function.
4696 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
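/* Worked instance: for int x, the expression  x < 48 ? x : 47  has
   C1 == 48 and C2 == 47, so C1 == C2 + 1 and the code below
   recognizes it as MIN_EXPR <x, 47>; it plausibly started out as a
   MIN that an earlier fold rewrote into COND_EXPR form.  */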
4698 if (INTEGRAL_TYPE_P (type)
4699 && TREE_CODE (arg01) == INTEGER_CST
4700 && TREE_CODE (arg2) == INTEGER_CST)
4704 if (TREE_CODE (arg1) == INTEGER_CST)
4706 /* We can replace A with C1 in this case. */
4707 arg1 = fold_convert_loc (loc, type, arg01);
4708 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4711 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4712 MIN_EXPR, to preserve the signedness of the comparison. */
4713 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4715 && operand_equal_p (arg01,
4716 const_binop (PLUS_EXPR, arg2,
4717 build_int_cst (type, 1)),
4720 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4721 fold_convert_loc (loc, TREE_TYPE (arg00),
4723 return pedantic_non_lvalue_loc (loc,
4724 fold_convert_loc (loc, type, tem));
4729 /* If C1 is C2 - 1, this is min(A, C2), with the same care as above. */
4731 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4733 && operand_equal_p (arg01,
4734 const_binop (MINUS_EXPR, arg2,
4735 build_int_cst (type, 1)),
4738 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4739 fold_convert_loc (loc, TREE_TYPE (arg00),
4741 return pedantic_non_lvalue_loc (loc,
4742 fold_convert_loc (loc, type, tem));
4747 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4748 MAX_EXPR, to preserve the signedness of the comparison. */
4749 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4751 && operand_equal_p (arg01,
4752 const_binop (MINUS_EXPR, arg2,
4753 build_int_cst (type, 1)),
4756 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4757 fold_convert_loc (loc, TREE_TYPE (arg00),
4759 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4764 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4765 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4767 && operand_equal_p (arg01,
4768 const_binop (PLUS_EXPR, arg2,
4769 build_int_cst (type, 1)),
4772 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4773 fold_convert_loc (loc, TREE_TYPE (arg00),
4775 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4789 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4790 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4791 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4795 /* EXP is some logical combination of boolean tests. See if we can
4796 merge it into some range test. Return the new tree if so. */
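/* Illustrative example: each comparison is first turned into a range
   by make_range, then the ranges are merged, so for 32-bit int a

       a == 1 || a == 2

   becomes the range test + [1, 2], emitted by build_range_check as
   roughly  (unsigned) a - 1 <= 1.  */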
4799 fold_range_test (location_t loc, enum tree_code code, tree type,
4802 int or_op = (code == TRUTH_ORIF_EXPR
4803 || code == TRUTH_OR_EXPR);
4804 int in0_p, in1_p, in_p;
4805 tree low0, low1, low, high0, high1, high;
4806 bool strict_overflow_p = false;
4807 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4808 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4810 const char * const warnmsg = G_("assuming signed overflow does not occur "
4811 "when simplifying range test");
4813 /* If this is an OR operation, invert both sides; we will invert
4814 again at the end. */
4816 in0_p = ! in0_p, in1_p = ! in1_p;
4818 /* If both expressions are the same, if we can merge the ranges, and we
4819 can build the range test, return it or it inverted. If one of the
4820 ranges is always true or always false, consider it to be the same
4821 expression as the other. */
4822 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4823 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4825 && 0 != (tem = (build_range_check (loc, type,
4827 : rhs != 0 ? rhs : integer_zero_node,
4830 if (strict_overflow_p)
4831 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4832 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4835 /* On machines where branches are expensive, if this is a
4836 short-circuited branch and the underlying object on both sides
4837 is the same, make a non-short-circuit operation. */
4838 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4839 && lhs != 0 && rhs != 0
4840 && (code == TRUTH_ANDIF_EXPR
4841 || code == TRUTH_ORIF_EXPR)
4842 && operand_equal_p (lhs, rhs, 0))
4844 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4845 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4846 which cases we can't do this. */
4847 if (simple_operand_p (lhs))
4848 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4849 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4852 else if (!lang_hooks.decls.global_bindings_p ()
4853 && !CONTAINS_PLACEHOLDER_P (lhs))
4855 tree common = save_expr (lhs);
4857 if (0 != (lhs = build_range_check (loc, type, common,
4858 or_op ? ! in0_p : in0_p,
4860 && (0 != (rhs = build_range_check (loc, type, common,
4861 or_op ? ! in1_p : in1_p,
4864 if (strict_overflow_p)
4865 fold_overflow_warning (warnmsg,
4866 WARN_STRICT_OVERFLOW_COMPARISON);
4867 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4868 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4877 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a
4878 P-bit value. Arrange things so the extra bits will be set to zero if and
4879 only if C is sign-extended to its full width. If MASK is nonzero,
4880 it is an INTEGER_CST that should be AND'ed with the extra bits. */
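/* Worked instance, assuming an 8-bit mode and P == 4: for C == 0xfa,
   whose low 4 bits 1010 sign-extend to exactly 0xfa, the computed
   TEMP is 0xf0 and C ^ TEMP == 0x0a, extra bits clear; for the
   non-sign-extended C == 0x0a the same TEMP gives 0xfa, extra bits
   set.  */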
4883 unextend (tree c, int p, int unsignedp, tree mask)
4885 tree type = TREE_TYPE (c);
4886 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4889 if (p == modesize || unsignedp)
4892 /* We work by getting just the sign bit into the low-order bit, then
4893 into the high-order bit, then sign-extend. We then XOR that value with C. */
4895 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4896 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4898 /* We must use a signed type in order to get an arithmetic right shift.
4899 However, we must also avoid introducing accidental overflows, so that
4900 a subsequent call to integer_zerop will work. Hence we must
4901 do the type conversion here. At this point, the constant is either
4902 zero or one, and the conversion to a signed type can never overflow.
4903 We could get an overflow if this conversion is done anywhere else. */
4904 if (TYPE_UNSIGNED (type))
4905 temp = fold_convert (signed_type_for (type), temp);
4907 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4908 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4910 temp = const_binop (BIT_AND_EXPR, temp,
4911 fold_convert (TREE_TYPE (c), mask));
4912 /* If necessary, convert the type back to match the type of C. */
4913 if (TYPE_UNSIGNED (type))
4914 temp = fold_convert (type, temp);
4916 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4919 /* For an expression that has the form (A && B) || ~B or (A || B) && ~B,
4923 we can drop one of the inner expressions and simplify to A || ~B or A && ~B.
4927 LOC is the location of the resulting expression. OP is the inner
4928 logical operation; the left-hand side in the examples above, while CMPOP
4929 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4930 removing a condition that guards another, as in
4931 (A != NULL && A->...) || A == NULL
4932 which we must not transform. If RHS_ONLY is true, only eliminate the
4933 right-most operand of the inner logical operation. */
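/* Illustrative instance:

       (b > 0 && a > 0) || a <= 0

   drops the right-most inner operand, the inverse of the guard,
   giving  b > 0 || a <= 0.  With RHS_ONLY clear, the left-most
   operand could be dropped in the same way.  */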
4936 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4939 tree type = TREE_TYPE (cmpop);
4940 enum tree_code code = TREE_CODE (cmpop);
4941 enum tree_code truthop_code = TREE_CODE (op);
4942 tree lhs = TREE_OPERAND (op, 0);
4943 tree rhs = TREE_OPERAND (op, 1);
4944 tree orig_lhs = lhs, orig_rhs = rhs;
4945 enum tree_code rhs_code = TREE_CODE (rhs);
4946 enum tree_code lhs_code = TREE_CODE (lhs);
4947 enum tree_code inv_code;
4949 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4952 if (TREE_CODE_CLASS (code) != tcc_comparison)
4955 if (rhs_code == truthop_code)
4957 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4958 if (newrhs != NULL_TREE)
4961 rhs_code = TREE_CODE (rhs);
4964 if (lhs_code == truthop_code && !rhs_only)
4966 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4967 if (newlhs != NULL_TREE)
4970 lhs_code = TREE_CODE (lhs);
4974 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
4975 if (inv_code == rhs_code
4976 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
4977 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
4979 if (!rhs_only && inv_code == lhs_code
4980 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
4981 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
4983 if (rhs != orig_rhs || lhs != orig_lhs)
4984 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
4989 /* Find ways of folding logical expressions of LHS and RHS:
4990 Try to merge two comparisons to the same innermost item.
4991 Look for range tests like "ch >= '0' && ch <= '9'".
4992 Look for combinations of simple terms on machines with expensive branches
4993 and evaluate the RHS unconditionally.
4995 For example, if we have p->a == 2 && p->b == 4 and we can make an
4996 object large enough to span both A and B, we can do this with a comparison
4997 against the object ANDed with a mask.
4999 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5000 operations to do this with one comparison.
5002 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5003 function and the one above.
5005 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5006 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5008 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its two operands.
5011 We return the simplified tree or 0 if no optimization is possible. */
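/* Illustrative sketch, target-dependent and not a guaranteed output:
   for

       struct s { unsigned char a, b; } *p;

   the test  p->a == 2 && p->b == 4  can be done, when a 16-bit mode
   covers both fields, as one 16-bit load, a mask (all ones here, so
   it drops out), and one comparison against the constant holding 2
   and 4 in the corresponding byte positions.  */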
5014 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5017 /* If this is the "or" of two comparisons, we can do something if
5018 the comparisons are NE_EXPR. If this is the "and", we can do something
5019 if the comparisons are EQ_EXPR. I.e.,
5020 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5022 WANTED_CODE is this operation code. For single bit fields, we can
5023 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5024 comparison for one-bit fields. */
5026 enum tree_code wanted_code;
5027 enum tree_code lcode, rcode;
5028 tree ll_arg, lr_arg, rl_arg, rr_arg;
5029 tree ll_inner, lr_inner, rl_inner, rr_inner;
5030 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5031 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5032 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5033 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5034 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5035 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5036 enum machine_mode lnmode, rnmode;
5037 tree ll_mask, lr_mask, rl_mask, rr_mask;
5038 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5039 tree l_const, r_const;
5040 tree lntype, rntype, result;
5041 HOST_WIDE_INT first_bit, end_bit;
5043 tree orig_lhs = lhs, orig_rhs = rhs;
5044 enum tree_code orig_code = code;
5046 /* Start by getting the comparison codes. Fail if anything is volatile.
5047 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5048 it were surrounded with a NE_EXPR. */
5050 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5053 lcode = TREE_CODE (lhs);
5054 rcode = TREE_CODE (rhs);
5056 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5058 lhs = build2 (NE_EXPR, truth_type, lhs,
5059 build_int_cst (TREE_TYPE (lhs), 0));
5063 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5065 rhs = build2 (NE_EXPR, truth_type, rhs,
5066 build_int_cst (TREE_TYPE (rhs), 0));
5070 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5071 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5074 ll_arg = TREE_OPERAND (lhs, 0);
5075 lr_arg = TREE_OPERAND (lhs, 1);
5076 rl_arg = TREE_OPERAND (rhs, 0);
5077 rr_arg = TREE_OPERAND (rhs, 1);
5079 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5080 if (simple_operand_p (ll_arg)
5081 && simple_operand_p (lr_arg))
5083 if (operand_equal_p (ll_arg, rl_arg, 0)
5084 && operand_equal_p (lr_arg, rr_arg, 0))
5086 result = combine_comparisons (loc, code, lcode, rcode,
5087 truth_type, ll_arg, lr_arg);
5091 else if (operand_equal_p (ll_arg, rr_arg, 0)
5092 && operand_equal_p (lr_arg, rl_arg, 0))
5094 result = combine_comparisons (loc, code, lcode,
5095 swap_tree_comparison (rcode),
5096 truth_type, ll_arg, lr_arg);
5102 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5103 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5105 /* If the RHS can be evaluated unconditionally and its operands are
5106 simple, it wins to evaluate the RHS unconditionally on machines
5107 with expensive branches. In this case, this isn't a comparison
5108 that can be merged. Avoid doing this if the RHS is a floating-point
5109 comparison since those can trap. */
5111 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5113 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5114 && simple_operand_p (rl_arg)
5115 && simple_operand_p (rr_arg))
5117 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5118 if (code == TRUTH_OR_EXPR
5119 && lcode == NE_EXPR && integer_zerop (lr_arg)
5120 && rcode == NE_EXPR && integer_zerop (rr_arg)
5121 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5122 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5123 return build2_loc (loc, NE_EXPR, truth_type,
5124 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5126 build_int_cst (TREE_TYPE (ll_arg), 0));
5128 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5129 if (code == TRUTH_AND_EXPR
5130 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5131 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5132 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5133 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5134 return build2_loc (loc, EQ_EXPR, truth_type,
5135 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5137 build_int_cst (TREE_TYPE (ll_arg), 0));
5139 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5141 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5142 return build2_loc (loc, code, truth_type, lhs, rhs);
5147 /* See if the comparisons can be merged. Then get all the parameters for
5150 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5151 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5155 ll_inner = decode_field_reference (loc, ll_arg,
5156 &ll_bitsize, &ll_bitpos, &ll_mode,
5157 &ll_unsignedp, &volatilep, &ll_mask,
5159 lr_inner = decode_field_reference (loc, lr_arg,
5160 &lr_bitsize, &lr_bitpos, &lr_mode,
5161 &lr_unsignedp, &volatilep, &lr_mask,
5163 rl_inner = decode_field_reference (loc, rl_arg,
5164 &rl_bitsize, &rl_bitpos, &rl_mode,
5165 &rl_unsignedp, &volatilep, &rl_mask,
5167 rr_inner = decode_field_reference (loc, rr_arg,
5168 &rr_bitsize, &rr_bitpos, &rr_mode,
5169 &rr_unsignedp, &volatilep, &rr_mask,
5172 /* The inner operation on the lhs of each comparison must be the
5173 same if we are to be able to do anything.
5174 Then see if we have constants. If not, the same must be true for the rhs's. */
5176 if (volatilep || ll_inner == 0 || rl_inner == 0
5177 || ! operand_equal_p (ll_inner, rl_inner, 0))
5180 if (TREE_CODE (lr_arg) == INTEGER_CST
5181 && TREE_CODE (rr_arg) == INTEGER_CST)
5182 l_const = lr_arg, r_const = rr_arg;
5183 else if (lr_inner == 0 || rr_inner == 0
5184 || ! operand_equal_p (lr_inner, rr_inner, 0))
5187 l_const = r_const = 0;
5189 /* If either comparison code is not correct for our logical operation,
5190 fail. However, we can convert a one-bit comparison against zero into
5191 the opposite comparison against that bit being set in the field. */
5193 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5194 if (lcode != wanted_code)
5196 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5198 /* Make the left operand unsigned, since we are only interested
5199 in the value of one bit. Otherwise we are doing the wrong thing below. */
5208 /* This is analogous to the code for l_const above. */
5209 if (rcode != wanted_code)
5211 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5220 /* See if we can find a mode that contains both fields being compared on
5221 the left. If we can't, fail. Otherwise, update all constants and masks
5222 to be relative to a field of that size. */
5223 first_bit = MIN (ll_bitpos, rl_bitpos);
5224 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5225 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5226 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5228 if (lnmode == VOIDmode)
5231 lnbitsize = GET_MODE_BITSIZE (lnmode);
5232 lnbitpos = first_bit & ~ (lnbitsize - 1);
5233 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5234 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5236 if (BYTES_BIG_ENDIAN)
5238 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5239 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5242 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5243 size_int (xll_bitpos));
5244 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5245 size_int (xrl_bitpos));
5249 l_const = fold_convert_loc (loc, lntype, l_const);
5250 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5251 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5252 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5253 fold_build1_loc (loc, BIT_NOT_EXPR,
5256 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5258 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5263 r_const = fold_convert_loc (loc, lntype, r_const);
5264 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5265 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5266 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5267 fold_build1_loc (loc, BIT_NOT_EXPR,
5270 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5272 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5276 /* If the right sides are not constant, do the same for them. Also,
5277 disallow this optimization if a size or signedness mismatch occurs
5278 between the left and right sides. */
5281 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5282 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5283 /* Make sure the two fields on the right
5284 correspond to the left without being swapped. */
5285 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5288 first_bit = MIN (lr_bitpos, rr_bitpos);
5289 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5290 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5291 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5293 if (rnmode == VOIDmode)
5296 rnbitsize = GET_MODE_BITSIZE (rnmode);
5297 rnbitpos = first_bit & ~ (rnbitsize - 1);
5298 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5299 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5301 if (BYTES_BIG_ENDIAN)
5303 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5304 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5307 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5309 size_int (xlr_bitpos));
5310 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5312 size_int (xrr_bitpos));
5314 /* Make a mask that corresponds to both fields being compared.
5315 Do this for both items being compared. If the operands are the
5316 same size and the bits being compared are in the same position
5317 then we can do this by masking both and comparing the masked results. */
5319 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5320 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5321 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5323 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5324 ll_unsignedp || rl_unsignedp);
5325 if (! all_ones_mask_p (ll_mask, lnbitsize))
5326 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5328 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5329 lr_unsignedp || rr_unsignedp);
5330 if (! all_ones_mask_p (lr_mask, rnbitsize))
5331 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5333 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5336 /* There is still another way we can do something: If both pairs of
5337 fields being compared are adjacent, we may be able to make a wider
5338 field containing them both.
5340 Note that we still must mask the lhs/rhs expressions. Furthermore,
5341 the mask must be shifted to account for the shift done by
5342 make_bit_field_ref. */
5343 if ((ll_bitsize + ll_bitpos == rl_bitpos
5344 && lr_bitsize + lr_bitpos == rr_bitpos)
5345 || (ll_bitpos == rl_bitpos + rl_bitsize
5346 && lr_bitpos == rr_bitpos + rr_bitsize))
5350 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5351 ll_bitsize + rl_bitsize,
5352 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5353 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5354 lr_bitsize + rr_bitsize,
5355 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5357 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5358 size_int (MIN (xll_bitpos, xrl_bitpos)));
5359 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5360 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5362 /* Convert to the smaller type before masking out unwanted bits. */
5364 if (lntype != rntype)
5366 if (lnbitsize > rnbitsize)
5368 lhs = fold_convert_loc (loc, rntype, lhs);
5369 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5372 else if (lnbitsize < rnbitsize)
5374 rhs = fold_convert_loc (loc, lntype, rhs);
5375 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5380 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5381 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5383 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5384 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5386 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5392 /* Handle the case of comparisons with constants. If there is something in
5393 common between the masks, those bits of the constants must be the same.
5394 If not, the condition is always false. Test for this to avoid generating
5395 incorrect code below. */
5396 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5397 if (! integer_zerop (result)
5398 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5399 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5401 if (wanted_code == NE_EXPR)
5403 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5404 return constant_boolean_node (true, truth_type);
5408 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5409 return constant_boolean_node (false, truth_type);
5413 /* Construct the expression we will return. First get the component
5414 reference we will make. Unless the mask is all ones the width of
5415 that field, perform the mask operation. Then compare with the merged constant. */
5417 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5418 ll_unsignedp || rl_unsignedp);
5420 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5421 if (! all_ones_mask_p (ll_mask, lnbitsize))
5422 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5424 return build2_loc (loc, wanted_code, truth_type, result,
5425 const_binop (BIT_IOR_EXPR, l_const, r_const));
5428 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a constant. */
5432 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5436 enum tree_code op_code;
5439 int consts_equal, consts_lt;
5442 STRIP_SIGN_NOPS (arg0);
5444 op_code = TREE_CODE (arg0);
5445 minmax_const = TREE_OPERAND (arg0, 1);
5446 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5447 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5448 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5449 inner = TREE_OPERAND (arg0, 0);
5451 /* If something does not permit us to optimize, return the original tree. */
5452 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5453 || TREE_CODE (comp_const) != INTEGER_CST
5454 || TREE_OVERFLOW (comp_const)
5455 || TREE_CODE (minmax_const) != INTEGER_CST
5456 || TREE_OVERFLOW (minmax_const))
5459 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5460 and GT_EXPR, doing the rest with recursive calls using logical simplifications. */
5464 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5467 = optimize_minmax_comparison (loc,
5468 invert_tree_comparison (code, false),
5471 return invert_truthvalue_loc (loc, tem);
5477 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5478 optimize_minmax_comparison
5479 (loc, EQ_EXPR, type, arg0, comp_const),
5480 optimize_minmax_comparison
5481 (loc, GT_EXPR, type, arg0, comp_const));
5484 if (op_code == MAX_EXPR && consts_equal)
5485 /* MAX (X, 0) == 0 -> X <= 0 */
5486 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5488 else if (op_code == MAX_EXPR && consts_lt)
5489 /* MAX (X, 0) == 5 -> X == 5 */
5490 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5492 else if (op_code == MAX_EXPR)
5493 /* MAX (X, 0) == -1 -> false */
5494 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5496 else if (consts_equal)
5497 /* MIN (X, 0) == 0 -> X >= 0 */
5498 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5501 /* MIN (X, 0) == 5 -> false */
5502 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5505 /* MIN (X, 0) == -1 -> X == -1 */
5506 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5509 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5510 /* MAX (X, 0) > 0 -> X > 0
5511 MAX (X, 0) > 5 -> X > 5 */
5512 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5514 else if (op_code == MAX_EXPR)
5515 /* MAX (X, 0) > -1 -> true */
5516 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5518 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5519 /* MIN (X, 0) > 0 -> false
5520 MIN (X, 0) > 5 -> false */
5521 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5524 /* MIN (X, 0) > -1 -> X > -1 */
5525 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5532 /* T is an integer expression that is being multiplied, divided, or taken
5533 modulo (CODE says which operation and what kind of divide or modulus) by a
5534 constant C. See if we can eliminate that operation by folding it with
5535 other operations already in T. WIDE_TYPE, if non-null, is a type that
5536 should be used for the computation if wider than our type.
5538 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5539 (X * 2) + (Y * 4). We must, however, be assured that either the original
5540 expression would not overflow or that overflow is undefined for the type
5541 in the language in question.
5543 If we return a non-null expression, it is an equivalent form of the
5544 original computation, but need not be in the original type.
5546 We set *STRICT_OVERFLOW_P to true if the return value depends on
5547 signed overflow being undefined. Otherwise we do not change
5548 *STRICT_OVERFLOW_P. */
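/* Worked caveat, assuming 32-bit unsigned X with wrapping overflow:
   rewriting  (X * 8 + Y * 16) / 4  as  X * 2 + Y * 4  is invalid,
   since with X = 0x20000000 and Y = 0 the product X * 8 wraps to 0
   and the original yields 0, while the rewrite yields 0x40000000.
   Hence the checks below for undefined overflow or sizetype.  */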
5551 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5552 bool *strict_overflow_p)
5554 /* To avoid exponential search depth, refuse to allow recursion past
5555 three levels. Beyond that (1) it's highly unlikely that we'll find
5556 something interesting and (2) we've probably processed it before
5557 when we built the inner expression. */
5566 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5573 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5574 bool *strict_overflow_p)
5576 tree type = TREE_TYPE (t);
5577 enum tree_code tcode = TREE_CODE (t);
5578 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5579 > GET_MODE_SIZE (TYPE_MODE (type)))
5580 ? wide_type : type);
5582 int same_p = tcode == code;
5583 tree op0 = NULL_TREE, op1 = NULL_TREE;
5584 bool sub_strict_overflow_p;
5586 /* Don't deal with constants of zero here; they confuse the code below. */
5587 if (integer_zerop (c))
5590 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5591 op0 = TREE_OPERAND (t, 0);
5593 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5594 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5596 /* Note that we need not handle conditional operations here since fold
5597 already handles those cases. So just do arithmetic here. */
5601 /* For a constant, we can always simplify if we are a multiply
5602 or (for divide and modulus) if it is a multiple of our constant. */
5603 if (code == MULT_EXPR
5604 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5605 return const_binop (code, fold_convert (ctype, t),
5606 fold_convert (ctype, c));
5609 CASE_CONVERT: case NON_LVALUE_EXPR:
5610 /* If op0 is an expression ... */
5611 if ((COMPARISON_CLASS_P (op0)
5612 || UNARY_CLASS_P (op0)
5613 || BINARY_CLASS_P (op0)
5614 || VL_EXP_CLASS_P (op0)
5615 || EXPRESSION_CLASS_P (op0))
5616 /* ... and has wrapping overflow, and its type is smaller
5617 than ctype, then we cannot pass through as widening. */
5618 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5619 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5620 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5621 && (TYPE_PRECISION (ctype)
5622 > TYPE_PRECISION (TREE_TYPE (op0))))
5623 /* ... or this is a truncation (t is narrower than op0),
5624 then we cannot pass through this narrowing. */
5625 || (TYPE_PRECISION (type)
5626 < TYPE_PRECISION (TREE_TYPE (op0)))
5627 /* ... or signedness changes for division or modulus,
5628 then we cannot pass through this conversion. */
5629 || (code != MULT_EXPR
5630 && (TYPE_UNSIGNED (ctype)
5631 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5632 /* ... or has undefined overflow while the type it is converted
5633 to has not, we cannot do the operation in the inner type
5634 as that would introduce undefined overflow. */
5635 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5636 && !TYPE_OVERFLOW_UNDEFINED (type))))
5639 /* Pass the constant down and see if we can make a simplification. If
5640 we can, replace this expression with the inner simplification for
5641 possible later conversion to our or some other type. */
5642 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5643 && TREE_CODE (t2) == INTEGER_CST
5644 && !TREE_OVERFLOW (t2)
5645 && (0 != (t1 = extract_muldiv (op0, t2, code,
5647 ? ctype : NULL_TREE,
5648 strict_overflow_p))))
5653 /* If widening the type changes it from signed to unsigned, then we
5654 must avoid building ABS_EXPR itself as unsigned. */
5655 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5657 tree cstype = (*signed_type_for) (ctype);
5658 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5661 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5662 return fold_convert (ctype, t1);
5666 /* If the constant is negative, we cannot simplify this. */
5667 if (tree_int_cst_sgn (c) == -1)
5671 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5673 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5676 case MIN_EXPR: case MAX_EXPR:
5677 /* If widening the type changes the signedness, then we can't perform
5678 this optimization as that changes the result. */
5679 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5682 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5683 sub_strict_overflow_p = false;
5684 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5685 &sub_strict_overflow_p)) != 0
5686 && (t2 = extract_muldiv (op1, c, code, wide_type,
5687 &sub_strict_overflow_p)) != 0)
5689 if (tree_int_cst_sgn (c) < 0)
5690 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5691 if (sub_strict_overflow_p)
5692 *strict_overflow_p = true;
5693 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5694 fold_convert (ctype, t2));
5698 case LSHIFT_EXPR: case RSHIFT_EXPR:
5699 /* If the second operand is constant, this is a multiplication
5700 or floor division by a power of two, so we can treat it that
5701 way unless the multiplier or divisor overflows. Signed
5702 left-shift overflow is implementation-defined rather than
5703 undefined in C90, so do not convert signed left shift into multiplication. */
5705 if (TREE_CODE (op1) == INTEGER_CST
5706 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5707 /* const_binop may not detect overflow correctly,
5708 so check for it explicitly here. */
5709 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5710 && TREE_INT_CST_HIGH (op1) == 0
5711 && 0 != (t1 = fold_convert (ctype,
5712 const_binop (LSHIFT_EXPR,
5715 && !TREE_OVERFLOW (t1))
5716 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5717 ? MULT_EXPR : FLOOR_DIV_EXPR,
5719 fold_convert (ctype, op0),
5721 c, code, wide_type, strict_overflow_p);
5724 case PLUS_EXPR: case MINUS_EXPR:
5725 /* See if we can eliminate the operation on both sides. If we can, we
5726 can return a new PLUS or MINUS. If we can't, the only remaining
5727 cases where we can do anything are if the second operand is a constant. */
5729 sub_strict_overflow_p = false;
5730 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5731 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5732 if (t1 != 0 && t2 != 0
5733 && (code == MULT_EXPR
5734 /* If not multiplication, we can only do this if both operands
5735 are divisible by c. */
5736 || (multiple_of_p (ctype, op0, c)
5737 && multiple_of_p (ctype, op1, c))))
5739 if (sub_strict_overflow_p)
5740 *strict_overflow_p = true;
5741 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5742 fold_convert (ctype, t2));
5745 /* If this was a subtraction, negate OP1 and set it to be an addition.
5746 This simplifies the logic below. */
5747 if (tcode == MINUS_EXPR)
5749 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5750 /* If OP1 was not easily negatable, the constant may be OP0. */
5751 if (TREE_CODE (op0) == INTEGER_CST)
5762 if (TREE_CODE (op1) != INTEGER_CST)
5765 /* If either OP1 or C is negative, this optimization is not safe for
5766 some of the division and remainder types, while for others we need
5767 to change the code. */
5768 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5770 if (code == CEIL_DIV_EXPR)
5771 code = FLOOR_DIV_EXPR;
5772 else if (code == FLOOR_DIV_EXPR)
5773 code = CEIL_DIV_EXPR;
5774 else if (code != MULT_EXPR
5775 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5779 /* If it's a multiply or a division/modulus operation of a multiple
5780 of our constant, do the operation and verify it doesn't overflow. */
5781 if (code == MULT_EXPR
5782 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5784 op1 = const_binop (code, fold_convert (ctype, op1),
5785 fold_convert (ctype, c));
5786 /* We allow the constant to overflow with wrapping semantics. */
5787 if (op1 == 0
5788 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5794 /* If we have an unsigned type that is not a sizetype, we cannot widen
5795 the operation since it will change the result if the original
5796 computation overflowed. */
5797 if (TYPE_UNSIGNED (ctype)
5798 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5802 /* If we were able to eliminate our operation from the first side,
5803 apply our operation to the second side and reform the PLUS. */
5804 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5805 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5807 /* The last case is if we are a multiply. In that case, we can
5808 apply the distributive law to commute the multiply and addition
5809 if the multiplication of the constants doesn't overflow. */
5810 if (code == MULT_EXPR)
5811 return fold_build2 (tcode, ctype,
5812 fold_build2 (code, ctype,
5813 fold_convert (ctype, op0),
5814 fold_convert (ctype, c)),
5820 /* We have a special case here if we are doing something like
5821 (C * 8) % 4 since we know that's zero. */
5822 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5823 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5824 /* If the multiplication can overflow we cannot optimize this.
5825 ??? Until we can properly mark individual operations as
5826 not overflowing we need to treat sizetype specially here, as
5827 stor-layout relies on this optimization to make
5828 DECL_FIELD_BIT_OFFSET always a constant. */
5829 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5830 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5831 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5832 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5833 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5835 *strict_overflow_p = true;
5836 return omit_one_operand (type, integer_zero_node, op0);
5839 /* ... fall through ... */
5841 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5842 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5843 /* If we can extract our operation from the LHS, do so and return a
5844 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5845 do something only if the second operand is a constant. */
5847 && (t1 = extract_muldiv (op0, c, code, wide_type,
5848 strict_overflow_p)) != 0)
5849 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5850 fold_convert (ctype, op1));
5851 else if (tcode == MULT_EXPR && code == MULT_EXPR
5852 && (t1 = extract_muldiv (op1, c, code, wide_type,
5853 strict_overflow_p)) != 0)
5854 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5855 fold_convert (ctype, t1));
5856 else if (TREE_CODE (op1) != INTEGER_CST)
5859 /* If these are the same operation types, we can associate them
5860 assuming no overflow. */
5865 mul = double_int_mul_with_sign
5867 (tree_to_double_int (op1),
5868 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5870 (tree_to_double_int (c),
5871 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5872 false, &overflow_p);
5873 overflow_p = (((!TYPE_UNSIGNED (ctype)
5874 || (TREE_CODE (ctype) == INTEGER_TYPE
5875 && TYPE_IS_SIZETYPE (ctype)))
5877 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5878 if (!double_int_fits_to_tree_p (ctype, mul)
5879 && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
5880 || !TYPE_UNSIGNED (ctype)
5881 || (TREE_CODE (ctype) == INTEGER_TYPE
5882 && TYPE_IS_SIZETYPE (ctype))))
5885 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5886 double_int_to_tree (ctype, mul));
5889 /* If these operations "cancel" each other, we have the main
5890 optimizations of this pass, which occur when either constant is a
5891 multiple of the other, in which case we replace this with either an
5892 operation of CODE or TCODE.
5894 If we have an unsigned type that is not a sizetype, we cannot do
5895 this since it will change the result if the original computation overflowed. */
5897 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5898 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5899 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5900 || (tcode == MULT_EXPR
5901 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5902 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5903 && code != MULT_EXPR)))
5905 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5907 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5908 *strict_overflow_p = true;
5909 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5910 fold_convert (ctype,
5911 const_binop (TRUNC_DIV_EXPR,
5914 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5916 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5917 *strict_overflow_p = true;
5918 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5919 fold_convert (ctype,
5920 const_binop (TRUNC_DIV_EXPR,
5933 /* Return a node which has the indicated constant VALUE (either 0 or
5934 1), and is of the indicated TYPE. */
5937 constant_boolean_node (int value, tree type)
5939 if (type == integer_type_node)
5940 return value ? integer_one_node : integer_zero_node;
5941 else if (type == boolean_type_node)
5942 return value ? boolean_true_node : boolean_false_node;
5944 return build_int_cst (type, value);
5948 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5949 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5950 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5951 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5952 COND is the first argument to CODE; otherwise (as in the example
5953 given here), it is the second argument. TYPE is the type of the
5954 original expression. Return NULL_TREE if no simplification is possible. */
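/* Illustrative instance: for int a and a boolean b,

       a + (b ? 1 : 0)

   becomes  b ? (a + 1) : (a + 0),  whose second arm folds to plain
   a; the transformation is only kept when at least one arm
   simplifies, as checked below.  */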
5958 fold_binary_op_with_conditional_arg (location_t loc,
5959 enum tree_code code,
5960 tree type, tree op0, tree op1,
5961 tree cond, tree arg, int cond_first_p)
5963 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5964 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5965 tree test, true_value, false_value;
5966 tree lhs = NULL_TREE;
5967 tree rhs = NULL_TREE;
5969 if (TREE_CODE (cond) == COND_EXPR)
5971 test = TREE_OPERAND (cond, 0);
5972 true_value = TREE_OPERAND (cond, 1);
5973 false_value = TREE_OPERAND (cond, 2);
5974 /* If this operand throws an exception, then it does not make
5975 sense to try to perform a logical or arithmetic operation involving it. */
5977 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5979 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5984 tree testtype = TREE_TYPE (cond);
5986 true_value = constant_boolean_node (true, testtype);
5987 false_value = constant_boolean_node (false, testtype);
5990 /* This transformation is only worthwhile if we don't have to wrap ARG
5991 in a SAVE_EXPR and the operation can be simplified on at least one
5992 of the branches once it's pushed inside the COND_EXPR. */
5993 if (!TREE_CONSTANT (arg)
5994 && (TREE_SIDE_EFFECTS (arg)
5995 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
5998 arg = fold_convert_loc (loc, arg_type, arg);
6001 true_value = fold_convert_loc (loc, cond_type, true_value);
6003 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6005 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6009 false_value = fold_convert_loc (loc, cond_type, false_value);
6011 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6013 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6016 /* Check that we have simplified at least one of the branches. */
6017 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6020 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6024 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6026 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6027 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6028 ADDEND is the same as X.
6030 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6031 and finite. The problematic cases are when X is zero, and its mode
6032 has signed zeros. In the case of rounding towards -infinity,
6033 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6034 modes, X + 0 is not the same as X because -0 + 0 is 0. */
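/* Concrete IEEE instances, assuming round-to-nearest unless stated:
   -0.0 + 0.0 evaluates to +0.0, so X + 0.0 can change the sign of a
   zero X; X - 0.0 preserves both zeros, except under rounding
   towards -infinity, where +0.0 - 0.0 yields -0.0.  */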
6037 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6039 if (!real_zerop (addend))
6042 /* Don't allow the fold with -fsignaling-nans. */
6043 if (HONOR_SNANS (TYPE_MODE (type)))
6046 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6047 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6050 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6051 if (TREE_CODE (addend) == REAL_CST
6052 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6055 /* The mode has signed zeros, and we have to honor their sign.
6056 In this situation, there is only one case we can return true for.
6057 X - 0 is the same as X unless rounding towards -infinity is supported. */
6059 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6062 /* Subroutine of fold() that checks comparisons of built-in math
6063 functions against real constants.
6065 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6066 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6067 is the type of the result and ARG0 and ARG1 are the operands of the
6068 comparison. ARG1 must be a TREE_REAL_CST.
6070 The function returns the constant folded tree if a simplification
6071 can be made, and NULL_TREE otherwise. */
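/* Illustrative instance: sqrt (x) > 1.5 folds below into x > 2.25,
   squaring the bound and dropping the call; this stays correct even
   for NaN or negative x, where both forms compare false.  The
   negative and huge bounds are handled by the special cases
   first.  */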
6074 fold_mathfn_compare (location_t loc,
6075 enum built_in_function fcode, enum tree_code code,
6076 tree type, tree arg0, tree arg1)
6080 if (BUILTIN_SQRT_P (fcode))
6082 tree arg = CALL_EXPR_ARG (arg0, 0);
6083 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6085 c = TREE_REAL_CST (arg1);
6086 if (REAL_VALUE_NEGATIVE (c))
6088 /* sqrt(x) < y is always false, if y is negative. */
6089 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6090 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6092 /* sqrt(x) > y is always true, if y is negative and we
6093 don't care about NaNs, i.e. negative values of x. */
6094 if (code == NE_EXPR || !HONOR_NANS (mode))
6095 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6097 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6098 return fold_build2_loc (loc, GE_EXPR, type, arg,
6099 build_real (TREE_TYPE (arg), dconst0));
6101 else if (code == GT_EXPR || code == GE_EXPR)
6105 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6106 real_convert (&c2, mode, &c2);
6108 if (REAL_VALUE_ISINF (c2))
6110 /* sqrt(x) > y is x == +Inf, when y is very large. */
6111 if (HONOR_INFINITIES (mode))
6112 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6113 build_real (TREE_TYPE (arg), c2));
6115 /* sqrt(x) > y is always false, when y is very large
6116 and we don't care about infinities. */
6117 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6120 /* sqrt(x) > c is the same as x > c*c. */
6121 return fold_build2_loc (loc, code, type, arg,
6122 build_real (TREE_TYPE (arg), c2));
6124 else if (code == LT_EXPR || code == LE_EXPR)
6128 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6129 real_convert (&c2, mode, &c2);
6131 if (REAL_VALUE_ISINF (c2))
6133 /* sqrt(x) < y is always true, when y is a very large
6134 value and we don't care about NaNs or Infinities. */
6135 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6136 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6138 /* sqrt(x) < y is x != +Inf when y is very large and we
6139 don't care about NaNs. */
6140 if (! HONOR_NANS (mode))
6141 return fold_build2_loc (loc, NE_EXPR, type, arg,
6142 build_real (TREE_TYPE (arg), c2));
6144 /* sqrt(x) < y is x >= 0 when y is very large and we
6145 don't care about Infinities. */
6146 if (! HONOR_INFINITIES (mode))
6147 return fold_build2_loc (loc, GE_EXPR, type, arg,
6148 build_real (TREE_TYPE (arg), dconst0));
6150 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6151 arg = save_expr (arg);
6152 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6153 fold_build2_loc (loc, GE_EXPR, type, arg,
6154 build_real (TREE_TYPE (arg),
6156 fold_build2_loc (loc, NE_EXPR, type, arg,
6157 build_real (TREE_TYPE (arg),
6161 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6162 if (! HONOR_NANS (mode))
6163 return fold_build2_loc (loc, code, type, arg,
6164 build_real (TREE_TYPE (arg), c2));
6166 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6167 arg = save_expr (arg);
6168 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6169 fold_build2_loc (loc, GE_EXPR, type, arg,
6170 build_real (TREE_TYPE (arg),
6172 fold_build2_loc (loc, code, type, arg,
6173 build_real (TREE_TYPE (arg),
6181 /* Subroutine of fold() that optimizes comparisons against Infinities,
6182 either +Inf or -Inf.
6184 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6185 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6186 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6188 The function returns the constant folded tree if a simplification
6189 can be made, and NULL_TREE otherwise. */
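/* For example, when NaNs are honored, x <= +Inf folds to x == x
(i.e. !isnan (x)), and x < +Inf folds to x <= DBL_MAX. */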
6192 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6193 tree arg0, tree arg1)
6195 enum machine_mode mode;
6196 REAL_VALUE_TYPE max;
6200 mode = TYPE_MODE (TREE_TYPE (arg0));
6202 /* For negative infinity swap the sense of the comparison. */
6203 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6205 code = swap_tree_comparison (code);
6210 /* x > +Inf is always false, if we ignore sNaNs. */
6211 if (HONOR_SNANS (mode))
6213 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6216 /* x <= +Inf is always true, if we don't care about NaNs. */
6217 if (! HONOR_NANS (mode))
6218 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6220 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6221 arg0 = save_expr (arg0);
6222 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6226 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6227 real_maxval (&max, neg, mode);
6228 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6229 arg0, build_real (TREE_TYPE (arg0), max));
6232 /* x < +Inf is always equal to x <= DBL_MAX. */
6233 real_maxval (&max, neg, mode);
6234 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6235 arg0, build_real (TREE_TYPE (arg0), max));
6238 /* x != +Inf is always equal to !(x > DBL_MAX). */
6239 real_maxval (&max, neg, mode);
6240 if (! HONOR_NANS (mode))
6241 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6242 arg0, build_real (TREE_TYPE (arg0), max));
6244 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6245 arg0, build_real (TREE_TYPE (arg0), max));
6246 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6255 /* Subroutine of fold() that optimizes comparisons of a division by
6256 a nonzero integer constant against an integer constant, i.e. X/C1 op C2.
6259 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6260 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6261 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6263 The function returns the constant folded tree if a simplification
6264 can be made, and NULL_TREE otherwise. */
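/* For example, with C's truncating signed division, x/4 == 3 holds
exactly when x is in [12, 15], so it folds to the range check
12 <= x && x <= 15, and x/4 < 3 folds to x < 12. */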
6267 fold_div_compare (location_t loc,
6268 enum tree_code code, tree type, tree arg0, tree arg1)
6270 tree prod, tmp, hi, lo;
6271 tree arg00 = TREE_OPERAND (arg0, 0);
6272 tree arg01 = TREE_OPERAND (arg0, 1);
6274 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6278 /* We have to do this the hard way to detect unsigned overflow.
6279 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6280 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6281 TREE_INT_CST_HIGH (arg01),
6282 TREE_INT_CST_LOW (arg1),
6283 TREE_INT_CST_HIGH (arg1),
6284 &val.low, &val.high, unsigned_p);
6285 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6286 neg_overflow = false;
6290 tmp = int_const_binop (MINUS_EXPR, arg01,
6291 build_int_cst (TREE_TYPE (arg01), 1));
6294 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6295 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6296 TREE_INT_CST_HIGH (prod),
6297 TREE_INT_CST_LOW (tmp),
6298 TREE_INT_CST_HIGH (tmp),
6299 &val.low, &val.high, unsigned_p);
6300 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6301 -1, overflow | TREE_OVERFLOW (prod));
6303 else if (tree_int_cst_sgn (arg01) >= 0)
6305 tmp = int_const_binop (MINUS_EXPR, arg01,
6306 build_int_cst (TREE_TYPE (arg01), 1));
6307 switch (tree_int_cst_sgn (arg1))
6310 neg_overflow = true;
6311 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6316 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6321 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6331 /* A negative divisor reverses the relational operators. */
6332 code = swap_tree_comparison (code);
6334 tmp = int_const_binop (PLUS_EXPR, arg01,
6335 build_int_cst (TREE_TYPE (arg01), 1));
6336 switch (tree_int_cst_sgn (arg1))
6339 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6344 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6349 neg_overflow = true;
6350 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6362 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6363 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6364 if (TREE_OVERFLOW (hi))
6365 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6366 if (TREE_OVERFLOW (lo))
6367 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6368 return build_range_check (loc, type, arg00, 1, lo, hi);
6371 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6372 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6373 if (TREE_OVERFLOW (hi))
6374 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6375 if (TREE_OVERFLOW (lo))
6376 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6377 return build_range_check (loc, type, arg00, 0, lo, hi);
6380 if (TREE_OVERFLOW (lo))
6382 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6383 return omit_one_operand_loc (loc, type, tmp, arg00);
6385 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6388 if (TREE_OVERFLOW (hi))
6390 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6391 return omit_one_operand_loc (loc, type, tmp, arg00);
6393 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6396 if (TREE_OVERFLOW (hi))
6398 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6399 return omit_one_operand_loc (loc, type, tmp, arg00);
6401 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6404 if (TREE_OVERFLOW (lo))
6406 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6407 return omit_one_operand_loc (loc, type, tmp, arg00);
6409 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6419 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6420 equality/inequality test, then return a simplified form of the test
6421 using a sign test. Otherwise return NULL. TYPE is the desired result type. */
6425 fold_single_bit_test_into_sign_test (location_t loc,
6426 enum tree_code code, tree arg0, tree arg1,
6429 /* If this is testing a single bit, we can optimize the test. */
6430 if ((code == NE_EXPR || code == EQ_EXPR)
6431 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6432 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6434 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6435 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
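/* For instance, assuming a 32-bit int A, (A & 0x80000000) != 0
becomes A < 0 and (A & 0x80000000) == 0 becomes A >= 0. */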
6436 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6438 if (arg00 != NULL_TREE
6439 /* This is only a win if casting to a signed type is cheap,
6440 i.e. when arg00's type is not a partial mode. */
6441 && TYPE_PRECISION (TREE_TYPE (arg00))
6442 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6444 tree stype = signed_type_for (TREE_TYPE (arg00));
6445 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6447 fold_convert_loc (loc, stype, arg00),
6448 build_int_cst (stype, 0));
6455 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6456 equality/inequality test, then return a simplified form of
6457 the test using shifts and logical operations. Otherwise return
6458 NULL. TYPE is the desired result type. */
6461 fold_single_bit_test (location_t loc, enum tree_code code,
6462 tree arg0, tree arg1, tree result_type)
6464 /* If this is testing a single bit, we can optimize the test. */
6465 if ((code == NE_EXPR || code == EQ_EXPR)
6466 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6467 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6469 tree inner = TREE_OPERAND (arg0, 0);
6470 tree type = TREE_TYPE (arg0);
6471 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6472 enum machine_mode operand_mode = TYPE_MODE (type);
6474 tree signed_type, unsigned_type, intermediate_type;
6477 /* First, see if we can fold the single bit test into a sign-bit test. */
6479 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6484 /* Otherwise we have (A & C) != 0 where C is a single bit,
6485 convert that into ((A >> C2) & 1), where C2 = log2(C).
6486 Similarly for (A & C) == 0. */
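/* For instance, (A & 8) != 0 becomes (A >> 3) & 1, while
(A & 8) == 0 becomes ((A >> 3) ^ 1) & 1. */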
6488 /* If INNER is a right shift of a constant and it plus BITNUM does
6489 not overflow, adjust BITNUM and INNER. */
6490 if (TREE_CODE (inner) == RSHIFT_EXPR
6491 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6492 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6493 && bitnum < TYPE_PRECISION (type)
6494 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6495 bitnum - TYPE_PRECISION (type)))
6497 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6498 inner = TREE_OPERAND (inner, 0);
6501 /* If we are going to be able to omit the AND below, we must do our
6502 operations as unsigned. If we must use the AND, we have a choice.
6503 Normally unsigned is faster, but for some machines signed is. */
6504 #ifdef LOAD_EXTEND_OP
6505 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6506 && !flag_syntax_only) ? 0 : 1;
6511 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6512 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6513 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6514 inner = fold_convert_loc (loc, intermediate_type, inner);
6517 inner = build2 (RSHIFT_EXPR, intermediate_type,
6518 inner, size_int (bitnum));
6520 one = build_int_cst (intermediate_type, 1);
6522 if (code == EQ_EXPR)
6523 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6525 /* Put the AND last so it can combine with more things. */
6526 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6528 /* Make sure to return the proper type. */
6529 inner = fold_convert_loc (loc, result_type, inner);
6536 /* Check whether we are allowed to reorder operands arg0 and arg1,
6537 such that the evaluation of arg1 occurs before arg0. */
6540 reorder_operands_p (const_tree arg0, const_tree arg1)
6542 if (! flag_evaluation_order)
6544 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6546 return ! TREE_SIDE_EFFECTS (arg0)
6547 && ! TREE_SIDE_EFFECTS (arg1);
6550 /* Test whether it is preferable to swap two operands, ARG0 and
6551 ARG1, for example because ARG0 is an integer constant and ARG1
6552 isn't. If REORDER is true, only recommend swapping if we can
6553 evaluate the operands in reverse order. */
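/* For instance, given 5 + x this returns true, so the constant is
placed second and the expression is canonicalized to x + 5. */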
6556 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6558 STRIP_SIGN_NOPS (arg0);
6559 STRIP_SIGN_NOPS (arg1);
6561 if (TREE_CODE (arg1) == INTEGER_CST)
6563 if (TREE_CODE (arg0) == INTEGER_CST)
6566 if (TREE_CODE (arg1) == REAL_CST)
6568 if (TREE_CODE (arg0) == REAL_CST)
6571 if (TREE_CODE (arg1) == FIXED_CST)
6573 if (TREE_CODE (arg0) == FIXED_CST)
6576 if (TREE_CODE (arg1) == COMPLEX_CST)
6578 if (TREE_CODE (arg0) == COMPLEX_CST)
6581 if (TREE_CONSTANT (arg1))
6583 if (TREE_CONSTANT (arg0))
6586 if (optimize_function_for_size_p (cfun))
6589 if (reorder && flag_evaluation_order
6590 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6593 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6594 for commutative and comparison operators. Ensuring a canonical
6595 form allows the optimizers to find additional redundancies without
6596 having to explicitly check for both orderings. */
6597 if (TREE_CODE (arg0) == SSA_NAME
6598 && TREE_CODE (arg1) == SSA_NAME
6599 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6602 /* Put SSA_NAMEs last. */
6603 if (TREE_CODE (arg1) == SSA_NAME)
6605 if (TREE_CODE (arg0) == SSA_NAME)
6608 /* Put variables last. */
6617 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6618 ARG0 is extended to a wider type. */
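/* For instance, with unsigned char c, (int) c == 100 can be done as
c == 100 in the narrower type, while (int) c == 300 is known to be
false because 300 does not fit in unsigned char. */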
6621 fold_widened_comparison (location_t loc, enum tree_code code,
6622 tree type, tree arg0, tree arg1)
6624 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6626 tree shorter_type, outer_type;
6630 if (arg0_unw == arg0)
6632 shorter_type = TREE_TYPE (arg0_unw);
6634 #ifdef HAVE_canonicalize_funcptr_for_compare
6635 /* Disable this optimization if we're casting a function pointer
6636 type on targets that require function pointer canonicalization. */
6637 if (HAVE_canonicalize_funcptr_for_compare
6638 && TREE_CODE (shorter_type) == POINTER_TYPE
6639 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6643 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6646 arg1_unw = get_unwidened (arg1, NULL_TREE);
6648 /* If possible, express the comparison in the shorter mode. */
6649 if ((code == EQ_EXPR || code == NE_EXPR
6650 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6651 && (TREE_TYPE (arg1_unw) == shorter_type
6652 || ((TYPE_PRECISION (shorter_type)
6653 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6654 && (TYPE_UNSIGNED (shorter_type)
6655 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6656 || (TREE_CODE (arg1_unw) == INTEGER_CST
6657 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6658 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6659 && int_fits_type_p (arg1_unw, shorter_type))))
6660 return fold_build2_loc (loc, code, type, arg0_unw,
6661 fold_convert_loc (loc, shorter_type, arg1_unw));
6663 if (TREE_CODE (arg1_unw) != INTEGER_CST
6664 || TREE_CODE (shorter_type) != INTEGER_TYPE
6665 || !int_fits_type_p (arg1_unw, shorter_type))
6668 /* If we are comparing with an integer that does not fit into the range
6669 of the shorter type, the result is known. */
6670 outer_type = TREE_TYPE (arg1_unw);
6671 min = lower_bound_in_type (outer_type, shorter_type);
6672 max = upper_bound_in_type (outer_type, shorter_type);
6674 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6676 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6683 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6688 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6694 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6696 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6701 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6703 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6712 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6713 ARG0 just the signedness is changed. */
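/* For instance, with int x, (unsigned int) x == 5U becomes x == 5;
an equality comparison is unaffected by the change of signedness. */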
6716 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6717 tree arg0, tree arg1)
6720 tree inner_type, outer_type;
6722 if (!CONVERT_EXPR_P (arg0))
6725 outer_type = TREE_TYPE (arg0);
6726 arg0_inner = TREE_OPERAND (arg0, 0);
6727 inner_type = TREE_TYPE (arg0_inner);
6729 #ifdef HAVE_canonicalize_funcptr_for_compare
6730 /* Disable this optimization if we're casting a function pointer
6731 type on targets that require function pointer canonicalization. */
6732 if (HAVE_canonicalize_funcptr_for_compare
6733 && TREE_CODE (inner_type) == POINTER_TYPE
6734 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6738 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6741 if (TREE_CODE (arg1) != INTEGER_CST
6742 && !(CONVERT_EXPR_P (arg1)
6743 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6746 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6747 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6752 if (TREE_CODE (arg1) == INTEGER_CST)
6753 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6754 0, TREE_OVERFLOW (arg1));
6756 arg1 = fold_convert_loc (loc, inner_type, arg1);
6758 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6761 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6762 the step of the array. Reconstructs s and delta in the case of s *
6763 delta being an integer constant (and thus already folded). ADDR is
6764 the address. MULT is the multiplicative expression. If the
6765 function succeeds, the new address expression is returned.
6766 Otherwise NULL_TREE is returned. LOC is the location of the
6767 resulting expression. */
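/* For instance, for an array of 4-byte ints, &a[i] p+ 4 * d
can become &a[i + d]. */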
6770 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6772 tree s, delta, step;
6773 tree ref = TREE_OPERAND (addr, 0), pref;
6778 /* Strip the nops that might be added when converting op1 to sizetype. */
6781 /* Canonicalize op1 into a possibly non-constant delta
6782 and an INTEGER_CST s. */
6783 if (TREE_CODE (op1) == MULT_EXPR)
6785 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6790 if (TREE_CODE (arg0) == INTEGER_CST)
6795 else if (TREE_CODE (arg1) == INTEGER_CST)
6803 else if (TREE_CODE (op1) == INTEGER_CST)
6810 /* Act as if we had delta * 1. */
6812 s = integer_one_node;
6815 for (;; ref = TREE_OPERAND (ref, 0))
6817 if (TREE_CODE (ref) == ARRAY_REF)
6821 /* Remember if this was a multi-dimensional array. */
6822 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6825 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6828 itype = TREE_TYPE (domain);
6830 step = array_ref_element_size (ref);
6831 if (TREE_CODE (step) != INTEGER_CST)
6836 if (! tree_int_cst_equal (step, s))
6841 /* Check whether delta is a multiple of step. */
6842 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6848 /* Only fold here if we can verify we do not overflow one
6849 dimension of a multi-dimensional array. */
6854 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6855 || !TYPE_MAX_VALUE (domain)
6856 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6859 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6860 fold_convert_loc (loc, itype,
6861 TREE_OPERAND (ref, 1)),
6862 fold_convert_loc (loc, itype, delta));
6864 || TREE_CODE (tmp) != INTEGER_CST
6865 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6874 if (!handled_component_p (ref))
6878 /* We found a suitable array reference. So copy everything up to it,
6879 and replace the index. */
6881 pref = TREE_OPERAND (addr, 0);
6882 ret = copy_node (pref);
6883 SET_EXPR_LOCATION (ret, loc);
6888 pref = TREE_OPERAND (pref, 0);
6889 TREE_OPERAND (pos, 0) = copy_node (pref);
6890 pos = TREE_OPERAND (pos, 0);
6893 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6894 fold_convert_loc (loc, itype,
6895 TREE_OPERAND (pos, 1)),
6896 fold_convert_loc (loc, itype, delta));
6898 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6902 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6903 means A >= Y && A != MAX, but in this case we know that
6904 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6907 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6909 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6911 if (TREE_CODE (bound) == LT_EXPR)
6912 a = TREE_OPERAND (bound, 0);
6913 else if (TREE_CODE (bound) == GT_EXPR)
6914 a = TREE_OPERAND (bound, 1);
6918 typea = TREE_TYPE (a);
6919 if (!INTEGRAL_TYPE_P (typea)
6920 && !POINTER_TYPE_P (typea))
6923 if (TREE_CODE (ineq) == LT_EXPR)
6925 a1 = TREE_OPERAND (ineq, 1);
6926 y = TREE_OPERAND (ineq, 0);
6928 else if (TREE_CODE (ineq) == GT_EXPR)
6930 a1 = TREE_OPERAND (ineq, 0);
6931 y = TREE_OPERAND (ineq, 1);
6936 if (TREE_TYPE (a1) != typea)
6939 if (POINTER_TYPE_P (typea))
6941 /* Convert the pointer types into integers before taking the difference. */
6942 tree ta = fold_convert_loc (loc, ssizetype, a);
6943 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6944 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6947 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6949 if (!diff || !integer_onep (diff))
6952 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6955 /* Fold a sum or difference of at least one multiplication.
6956 Returns the folded tree or NULL if no simplification could be made. */
6959 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6960 tree arg0, tree arg1)
6962 tree arg00, arg01, arg10, arg11;
6963 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6965 /* (A * C) +- (B * C) -> (A+-B) * C.
6966 (A * C) +- A -> A * (C+-1).
6967 We are most concerned about the case where C is a constant,
6968 but other combinations show up during loop reduction. Since
6969 it is not difficult, try all four possibilities. */
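/* E.g. i*4 + j*4 becomes (i + j)*4, and i*7 + i becomes i*8. */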
6971 if (TREE_CODE (arg0) == MULT_EXPR)
6973 arg00 = TREE_OPERAND (arg0, 0);
6974 arg01 = TREE_OPERAND (arg0, 1);
6976 else if (TREE_CODE (arg0) == INTEGER_CST)
6978 arg00 = build_one_cst (type);
6983 /* We cannot generate constant 1 for fract. */
6984 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6987 arg01 = build_one_cst (type);
6989 if (TREE_CODE (arg1) == MULT_EXPR)
6991 arg10 = TREE_OPERAND (arg1, 0);
6992 arg11 = TREE_OPERAND (arg1, 1);
6994 else if (TREE_CODE (arg1) == INTEGER_CST)
6996 arg10 = build_one_cst (type);
6997 /* Since we canonicalize A - 2 to A + -2, get rid of that sign for
6998 the purpose of this canonicalization. */
6999 if (TREE_INT_CST_HIGH (arg1) == -1
7000 && negate_expr_p (arg1)
7001 && code == PLUS_EXPR)
7003 arg11 = negate_expr (arg1);
7011 /* We cannot generate constant 1 for fract. */
7012 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7015 arg11 = build_one_cst (type);
7019 if (operand_equal_p (arg01, arg11, 0))
7020 same = arg01, alt0 = arg00, alt1 = arg10;
7021 else if (operand_equal_p (arg00, arg10, 0))
7022 same = arg00, alt0 = arg01, alt1 = arg11;
7023 else if (operand_equal_p (arg00, arg11, 0))
7024 same = arg00, alt0 = arg01, alt1 = arg10;
7025 else if (operand_equal_p (arg01, arg10, 0))
7026 same = arg01, alt0 = arg00, alt1 = arg11;
7028 /* No identical multiplicands; see if we can find a common
7029 power-of-two factor in non-power-of-two multiplies. This
7030 can help in multi-dimensional array access. */
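/* E.g. i*12 + j*4 can become (i*3 + j)*4 this way. */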
7031 else if (host_integerp (arg01, 0)
7032 && host_integerp (arg11, 0))
7034 HOST_WIDE_INT int01, int11, tmp;
7037 int01 = TREE_INT_CST_LOW (arg01);
7038 int11 = TREE_INT_CST_LOW (arg11);
7040 /* Move min of absolute values to int11. */
7041 if ((int01 >= 0 ? int01 : -int01)
7042 < (int11 >= 0 ? int11 : -int11))
7044 tmp = int01, int01 = int11, int11 = tmp;
7045 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7052 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7053 /* The remainder should not be a constant, otherwise we
7054 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would increase
7055 the number of multiplications necessary. */
7056 && TREE_CODE (arg10) != INTEGER_CST)
7058 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7059 build_int_cst (TREE_TYPE (arg00),
7064 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7069 return fold_build2_loc (loc, MULT_EXPR, type,
7070 fold_build2_loc (loc, code, type,
7071 fold_convert_loc (loc, type, alt0),
7072 fold_convert_loc (loc, type, alt1)),
7073 fold_convert_loc (loc, type, same));
7078 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7079 specified by EXPR into the buffer PTR of length LEN bytes.
7080 Return the number of bytes placed in the buffer, or zero upon failure. */
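/* For instance, a 32-bit (SImode) constant 0x11223344 is written as
the bytes 44 33 22 11 on a little-endian target and 11 22 33 44
on a big-endian one. */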
7084 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7086 tree type = TREE_TYPE (expr);
7087 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7088 int byte, offset, word, words;
7089 unsigned char value;
7091 if (total_bytes > len)
7093 words = total_bytes / UNITS_PER_WORD;
7095 for (byte = 0; byte < total_bytes; byte++)
7097 int bitpos = byte * BITS_PER_UNIT;
7098 if (bitpos < HOST_BITS_PER_WIDE_INT)
7099 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7101 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7102 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7104 if (total_bytes > UNITS_PER_WORD)
7106 word = byte / UNITS_PER_WORD;
7107 if (WORDS_BIG_ENDIAN)
7108 word = (words - 1) - word;
7109 offset = word * UNITS_PER_WORD;
7110 if (BYTES_BIG_ENDIAN)
7111 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7113 offset += byte % UNITS_PER_WORD;
7116 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7117 ptr[offset] = value;
7123 /* Subroutine of native_encode_expr. Encode the REAL_CST
7124 specified by EXPR into the buffer PTR of length LEN bytes.
7125 Return the number of bytes placed in the buffer, or zero upon failure. */
7129 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7131 tree type = TREE_TYPE (expr);
7132 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7133 int byte, offset, word, words, bitpos;
7134 unsigned char value;
7136 /* There are always 32 bits in each long, no matter the size of
7137 the host's long. We handle floating point representations with up to 192 bits. */
7141 if (total_bytes > len)
7143 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7145 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7147 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7148 bitpos += BITS_PER_UNIT)
7150 byte = (bitpos / BITS_PER_UNIT) & 3;
7151 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7153 if (UNITS_PER_WORD < 4)
7155 word = byte / UNITS_PER_WORD;
7156 if (WORDS_BIG_ENDIAN)
7157 word = (words - 1) - word;
7158 offset = word * UNITS_PER_WORD;
7159 if (BYTES_BIG_ENDIAN)
7160 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7162 offset += byte % UNITS_PER_WORD;
7165 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7166 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7171 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7172 specified by EXPR into the buffer PTR of length LEN bytes.
7173 Return the number of bytes placed in the buffer, or zero upon failure. */
7177 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7182 part = TREE_REALPART (expr);
7183 rsize = native_encode_expr (part, ptr, len);
7186 part = TREE_IMAGPART (expr);
7187 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7190 return rsize + isize;
7194 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7195 specified by EXPR into the buffer PTR of length LEN bytes.
7196 Return the number of bytes placed in the buffer, or zero upon failure. */
7200 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7202 int i, size, offset, count;
7203 tree itype, elem, elements;
7206 elements = TREE_VECTOR_CST_ELTS (expr);
7207 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7208 itype = TREE_TYPE (TREE_TYPE (expr));
7209 size = GET_MODE_SIZE (TYPE_MODE (itype));
7210 for (i = 0; i < count; i++)
7214 elem = TREE_VALUE (elements);
7215 elements = TREE_CHAIN (elements);
7222 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7227 if (offset + size > len)
7229 memset (ptr+offset, 0, size);
7237 /* Subroutine of native_encode_expr. Encode the STRING_CST
7238 specified by EXPR into the buffer PTR of length LEN bytes.
7239 Return the number of bytes placed in the buffer, or zero upon failure. */
7243 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7245 tree type = TREE_TYPE (expr);
7246 HOST_WIDE_INT total_bytes;
7248 if (TREE_CODE (type) != ARRAY_TYPE
7249 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7250 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7251 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7253 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7254 if (total_bytes > len)
7256 if (TREE_STRING_LENGTH (expr) < total_bytes)
7258 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7259 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7260 total_bytes - TREE_STRING_LENGTH (expr));
7263 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7268 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7269 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7270 buffer PTR of length LEN bytes. Return the number of bytes
7271 placed in the buffer, or zero upon failure. */
7274 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7276 switch (TREE_CODE (expr))
7279 return native_encode_int (expr, ptr, len);
7282 return native_encode_real (expr, ptr, len);
7285 return native_encode_complex (expr, ptr, len);
7288 return native_encode_vector (expr, ptr, len);
7291 return native_encode_string (expr, ptr, len);
7299 /* Subroutine of native_interpret_expr. Interpret the contents of
7300 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7301 If the buffer cannot be interpreted, return NULL_TREE. */
7304 native_interpret_int (tree type, const unsigned char *ptr, int len)
7306 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7307 int byte, offset, word, words;
7308 unsigned char value;
7311 if (total_bytes > len)
7313 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7316 result = double_int_zero;
7317 words = total_bytes / UNITS_PER_WORD;
7319 for (byte = 0; byte < total_bytes; byte++)
7321 int bitpos = byte * BITS_PER_UNIT;
7322 if (total_bytes > UNITS_PER_WORD)
7324 word = byte / UNITS_PER_WORD;
7325 if (WORDS_BIG_ENDIAN)
7326 word = (words - 1) - word;
7327 offset = word * UNITS_PER_WORD;
7328 if (BYTES_BIG_ENDIAN)
7329 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7331 offset += byte % UNITS_PER_WORD;
7334 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7335 value = ptr[offset];
7337 if (bitpos < HOST_BITS_PER_WIDE_INT)
7338 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7340 result.high |= (unsigned HOST_WIDE_INT) value
7341 << (bitpos - HOST_BITS_PER_WIDE_INT);
7344 return double_int_to_tree (type, result);
7348 /* Subroutine of native_interpret_expr. Interpret the contents of
7349 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7350 If the buffer cannot be interpreted, return NULL_TREE. */
7353 native_interpret_real (tree type, const unsigned char *ptr, int len)
7355 enum machine_mode mode = TYPE_MODE (type);
7356 int total_bytes = GET_MODE_SIZE (mode);
7357 int byte, offset, word, words, bitpos;
7358 unsigned char value;
7359 /* There are always 32 bits in each long, no matter the size of
7360 the host's long. We handle floating point representations with up to 192 bits. */
7365 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7366 if (total_bytes > len || total_bytes > 24)
7368 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7370 memset (tmp, 0, sizeof (tmp));
7371 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7372 bitpos += BITS_PER_UNIT)
7374 byte = (bitpos / BITS_PER_UNIT) & 3;
7375 if (UNITS_PER_WORD < 4)
7377 word = byte / UNITS_PER_WORD;
7378 if (WORDS_BIG_ENDIAN)
7379 word = (words - 1) - word;
7380 offset = word * UNITS_PER_WORD;
7381 if (BYTES_BIG_ENDIAN)
7382 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7384 offset += byte % UNITS_PER_WORD;
7387 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7388 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7390 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7393 real_from_target (&r, tmp, mode);
7394 return build_real (type, r);
7398 /* Subroutine of native_interpret_expr. Interpret the contents of
7399 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7400 If the buffer cannot be interpreted, return NULL_TREE. */
7403 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7405 tree etype, rpart, ipart;
7408 etype = TREE_TYPE (type);
7409 size = GET_MODE_SIZE (TYPE_MODE (etype));
7412 rpart = native_interpret_expr (etype, ptr, size);
7415 ipart = native_interpret_expr (etype, ptr+size, size);
7418 return build_complex (type, rpart, ipart);
7422 /* Subroutine of native_interpret_expr. Interpret the contents of
7423 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7424 If the buffer cannot be interpreted, return NULL_TREE. */
7427 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7429 tree etype, elem, elements;
7432 etype = TREE_TYPE (type);
7433 size = GET_MODE_SIZE (TYPE_MODE (etype));
7434 count = TYPE_VECTOR_SUBPARTS (type);
7435 if (size * count > len)
7438 elements = NULL_TREE;
7439 for (i = count - 1; i >= 0; i--)
7441 elem = native_interpret_expr (etype, ptr+(i*size), size);
7444 elements = tree_cons (NULL_TREE, elem, elements);
7446 return build_vector (type, elements);
7450 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7451 the buffer PTR of length LEN as a constant of type TYPE. For
7452 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7453 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7454 return NULL_TREE. */
7457 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7459 switch (TREE_CODE (type))
7464 return native_interpret_int (type, ptr, len);
7467 return native_interpret_real (type, ptr, len);
7470 return native_interpret_complex (type, ptr, len);
7473 return native_interpret_vector (type, ptr, len);
7481 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7482 TYPE at compile-time. If we're unable to perform the conversion
7483 return NULL_TREE. */
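/* For instance, VIEW_CONVERT_EXPR<int>(1.0f) is folded by encoding the
REAL_CST into its target bytes and re-reading them as an int, which
yields 0x3f800000 on an IEEE single-precision target. */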
7486 fold_view_convert_expr (tree type, tree expr)
7488 /* We support up to 512-bit values (for V8DFmode). */
7489 unsigned char buffer[64];
7492 /* Check that the host and target are sane. */
7493 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7496 len = native_encode_expr (expr, buffer, sizeof (buffer));
7500 return native_interpret_expr (type, buffer, len);
7503 /* Build an expression for the address of T. Folds away INDIRECT_REF
7504 to avoid confusing the gimplify process. */
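/* For instance, building the address of *p yields p itself,
converted to PTRTYPE if necessary. */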
7507 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7509 /* The size of the object is not relevant when talking about its address. */
7510 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7511 t = TREE_OPERAND (t, 0);
7513 if (TREE_CODE (t) == INDIRECT_REF)
7515 t = TREE_OPERAND (t, 0);
7517 if (TREE_TYPE (t) != ptrtype)
7518 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7520 else if (TREE_CODE (t) == MEM_REF
7521 && integer_zerop (TREE_OPERAND (t, 1)))
7522 return TREE_OPERAND (t, 0);
7523 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7525 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7527 if (TREE_TYPE (t) != ptrtype)
7528 t = fold_convert_loc (loc, ptrtype, t);
7531 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7536 /* Build an expression for the address of T. */
7539 build_fold_addr_expr_loc (location_t loc, tree t)
7541 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7543 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7546 /* Fold a unary expression of code CODE and type TYPE with operand
7547 OP0. Return the folded expression if folding is successful.
7548 Otherwise, return NULL_TREE. */
7551 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7555 enum tree_code_class kind = TREE_CODE_CLASS (code);
7557 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7558 && TREE_CODE_LENGTH (code) == 1);
7563 if (CONVERT_EXPR_CODE_P (code)
7564 || code == FLOAT_EXPR || code == ABS_EXPR)
7566 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
7568 STRIP_SIGN_NOPS (arg0);
7572 /* Strip any conversions that don't change the mode. This
7573 is safe for every expression, except for a comparison
7574 expression because its signedness is derived from its operands.
7577 Note that this is done as an internal manipulation within
7578 the constant folder, in order to find the simplest
7579 representation of the arguments so that their form can be
7580 studied. In any case, the appropriate type conversions
7581 should be put back in the tree that will get out of the constant folder. */
7587 if (TREE_CODE_CLASS (code) == tcc_unary)
7589 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7590 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7591 fold_build1_loc (loc, code, type,
7592 fold_convert_loc (loc, TREE_TYPE (op0),
7593 TREE_OPERAND (arg0, 1))));
7594 else if (TREE_CODE (arg0) == COND_EXPR)
7596 tree arg01 = TREE_OPERAND (arg0, 1);
7597 tree arg02 = TREE_OPERAND (arg0, 2);
7598 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7599 arg01 = fold_build1_loc (loc, code, type,
7600 fold_convert_loc (loc,
7601 TREE_TYPE (op0), arg01));
7602 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7603 arg02 = fold_build1_loc (loc, code, type,
7604 fold_convert_loc (loc,
7605 TREE_TYPE (op0), arg02));
7606 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7609 /* If this was a conversion, and all we did was to move it
7610 inside the COND_EXPR, bring it back out. But leave it if
7611 it is a conversion from integer to integer and the
7612 result precision is no wider than a word since such a
7613 conversion is cheap and may be optimized away by combine,
7614 while it couldn't if it were outside the COND_EXPR. Then return
7615 so we don't get into an infinite recursion loop taking the
7616 conversion out and then back in. */
7618 if ((CONVERT_EXPR_CODE_P (code)
7619 || code == NON_LVALUE_EXPR)
7620 && TREE_CODE (tem) == COND_EXPR
7621 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7622 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7623 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7624 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7625 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7626 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7627 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7629 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7630 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7631 || flag_syntax_only))
7632 tem = build1_loc (loc, code, type,
7634 TREE_TYPE (TREE_OPERAND
7635 (TREE_OPERAND (tem, 1), 0)),
7636 TREE_OPERAND (tem, 0),
7637 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7638 TREE_OPERAND (TREE_OPERAND (tem, 2),
7647 /* Re-association barriers around constants and other re-association
7648 barriers can be removed. */
7649 if (CONSTANT_CLASS_P (op0)
7650 || TREE_CODE (op0) == PAREN_EXPR)
7651 return fold_convert_loc (loc, type, op0);
7656 case FIX_TRUNC_EXPR:
7657 if (TREE_TYPE (op0) == type)
7660 if (COMPARISON_CLASS_P (op0))
7662 /* If we have (type) (a CMP b) and type is an integral type, return
7663 a new expression involving the new type. */
7664 if (INTEGRAL_TYPE_P (type))
7665 return fold_build2_loc (loc, TREE_CODE (op0), type,
7666 TREE_OPERAND (op0, 0),
7667 TREE_OPERAND (op0, 1));
7669 return fold_build3_loc (loc, COND_EXPR, type, op0,
7670 fold_convert (type, boolean_true_node),
7671 fold_convert (type, boolean_false_node));
7674 /* Handle cases of two conversions in a row. */
7675 if (CONVERT_EXPR_P (op0))
7677 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7678 tree inter_type = TREE_TYPE (op0);
7679 int inside_int = INTEGRAL_TYPE_P (inside_type);
7680 int inside_ptr = POINTER_TYPE_P (inside_type);
7681 int inside_float = FLOAT_TYPE_P (inside_type);
7682 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7683 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7684 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7685 int inter_int = INTEGRAL_TYPE_P (inter_type);
7686 int inter_ptr = POINTER_TYPE_P (inter_type);
7687 int inter_float = FLOAT_TYPE_P (inter_type);
7688 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7689 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7690 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7691 int final_int = INTEGRAL_TYPE_P (type);
7692 int final_ptr = POINTER_TYPE_P (type);
7693 int final_float = FLOAT_TYPE_P (type);
7694 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7695 unsigned int final_prec = TYPE_PRECISION (type);
7696 int final_unsignedp = TYPE_UNSIGNED (type);
7698 /* In addition to the cases of two conversions in a row
7699 handled below, if we are converting something to its own
7700 type via an object of identical or wider precision, neither
7701 conversion is needed. */
7702 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7703 && (((inter_int || inter_ptr) && final_int)
7704 || (inter_float && final_float))
7705 && inter_prec >= final_prec)
7706 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7708 /* Likewise, if the intermediate and initial types are either both
7709 float or both integer, we don't need the middle conversion if the
7710 former is wider than the latter and doesn't change the signedness
7711 (for integers). Avoid this if the final type is a pointer since
7712 then we sometimes need the middle conversion. Likewise if the
7713 final type has a precision not equal to the size of its mode. */
7714 if (((inter_int && inside_int)
7715 || (inter_float && inside_float)
7716 || (inter_vec && inside_vec))
7717 && inter_prec >= inside_prec
7718 && (inter_float || inter_vec
7719 || inter_unsignedp == inside_unsignedp)
7720 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7721 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7723 && (! final_vec || inter_prec == inside_prec))
7724 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7726 /* If we have a sign-extension of a zero-extended value, we can
7727 replace that by a single zero-extension. */
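/* E.g. with unsigned char c, (long)(int)c zero-extends only once
and becomes (long)c. */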
7728 if (inside_int && inter_int && final_int
7729 && inside_prec < inter_prec && inter_prec < final_prec
7730 && inside_unsignedp && !inter_unsignedp)
7731 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7733 /* Two conversions in a row are not needed unless:
7734 - some conversion is floating-point (overstrict for now), or
7735 - some conversion is a vector (overstrict for now), or
7736 - the intermediate type is narrower than both initial and final types, or
7738 - the intermediate type and innermost type differ in signedness,
7739 and the outermost type is wider than the intermediate, or
7740 - the initial type is a pointer type and the precisions of the
7741 intermediate and final types differ, or
7742 - the final type is a pointer type and the precisions of the
7743 initial and intermediate types differ. */
7744 if (! inside_float && ! inter_float && ! final_float
7745 && ! inside_vec && ! inter_vec && ! final_vec
7746 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7747 && ! (inside_int && inter_int
7748 && inter_unsignedp != inside_unsignedp
7749 && inter_prec < final_prec)
7750 && ((inter_unsignedp && inter_prec > inside_prec)
7751 == (final_unsignedp && final_prec > inter_prec))
7752 && ! (inside_ptr && inter_prec != final_prec)
7753 && ! (final_ptr && inside_prec != inter_prec)
7754 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7755 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7756 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7759 /* Handle (T *)&A.B.C for A being of type T and B and C
7760 living at offset zero. This occurs frequently in
7761 C++ upcasting and then accessing the base. */
7762 if (TREE_CODE (op0) == ADDR_EXPR
7763 && POINTER_TYPE_P (type)
7764 && handled_component_p (TREE_OPERAND (op0, 0)))
7766 HOST_WIDE_INT bitsize, bitpos;
7768 enum machine_mode mode;
7769 int unsignedp, volatilep;
7770 tree base = TREE_OPERAND (op0, 0);
7771 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7772 &mode, &unsignedp, &volatilep, false);
7773 /* If the reference was to a (constant) zero offset, we can use
7774 the address of the base if it has the same base type
7775 as the result type and the pointer type is unqualified. */
7776 if (! offset && bitpos == 0
7777 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7778 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7779 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7780 return fold_convert_loc (loc, type,
7781 build_fold_addr_expr_loc (loc, base));
7784 if (TREE_CODE (op0) == MODIFY_EXPR
7785 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7786 /* Detect assigning a bitfield. */
7787 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7789 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7791 /* Don't leave an assignment inside a conversion
7792 unless assigning a bitfield. */
7793 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7794 /* First do the assignment, then return converted constant. */
7795 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7796 TREE_NO_WARNING (tem) = 1;
7797 TREE_USED (tem) = 1;
7801 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7802 constant (if x has signed type, the sign bit cannot be set
7803 in c). This folds extension into the BIT_AND_EXPR.
7804 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7805 very likely don't have maximal range for their precision and this
7806 transformation effectively doesn't preserve non-maximal ranges. */
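/* E.g. with unsigned char c, (unsigned int)(c & 0x7f) becomes
(unsigned int)c & 0x7f. */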
7807 if (TREE_CODE (type) == INTEGER_TYPE
7808 && TREE_CODE (op0) == BIT_AND_EXPR
7809 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7811 tree and_expr = op0;
7812 tree and0 = TREE_OPERAND (and_expr, 0);
7813 tree and1 = TREE_OPERAND (and_expr, 1);
7816 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7817 || (TYPE_PRECISION (type)
7818 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7820 else if (TYPE_PRECISION (TREE_TYPE (and1))
7821 <= HOST_BITS_PER_WIDE_INT
7822 && host_integerp (and1, 1))
7824 unsigned HOST_WIDE_INT cst;
7826 cst = tree_low_cst (and1, 1);
7827 cst &= (HOST_WIDE_INT) -1
7828 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7829 change = (cst == 0);
7830 #ifdef LOAD_EXTEND_OP
7832 && !flag_syntax_only
7833 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7836 tree uns = unsigned_type_for (TREE_TYPE (and0));
7837 and0 = fold_convert_loc (loc, uns, and0);
7838 and1 = fold_convert_loc (loc, uns, and1);
7844 tem = force_fit_type_double (type, tree_to_double_int (and1),
7845 0, TREE_OVERFLOW (and1));
7846 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7847 fold_convert_loc (loc, type, and0), tem);
7851 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7852 when one of the new casts will fold away. Conservatively we assume
7853 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7854 if (POINTER_TYPE_P (type)
7855 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7856 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7857 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7858 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7860 tree arg00 = TREE_OPERAND (arg0, 0);
7861 tree arg01 = TREE_OPERAND (arg0, 1);
7863 return fold_build2_loc (loc,
7864 TREE_CODE (arg0), type,
7865 fold_convert_loc (loc, type, arg00),
7866 fold_convert_loc (loc, sizetype, arg01));
7869 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7870 of the same precision, and X is an integer type not narrower than
7871 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7872 if (INTEGRAL_TYPE_P (type)
7873 && TREE_CODE (op0) == BIT_NOT_EXPR
7874 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7875 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7876 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7878 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7879 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7880 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7881 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7882 fold_convert_loc (loc, type, tem));
7885 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7886 type of X and Y (integer types only). */
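/* E.g. with int x and y, (unsigned char)(x * y) becomes
(unsigned char)x * (unsigned char)y, performing the multiplication
in the narrower (wrapping) type. */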
7887 if (INTEGRAL_TYPE_P (type)
7888 && TREE_CODE (op0) == MULT_EXPR
7889 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7890 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7892 /* Be careful not to introduce new overflows. */
7894 if (TYPE_OVERFLOW_WRAPS (type))
7897 mult_type = unsigned_type_for (type);
7899 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7901 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7902 fold_convert_loc (loc, mult_type,
7903 TREE_OPERAND (op0, 0)),
7904 fold_convert_loc (loc, mult_type,
7905 TREE_OPERAND (op0, 1)));
7906 return fold_convert_loc (loc, type, tem);
7910 tem = fold_convert_const (code, type, op0);
7911 return tem ? tem : NULL_TREE;
7913 case ADDR_SPACE_CONVERT_EXPR:
7914 if (integer_zerop (arg0))
7915 return fold_convert_const (code, type, arg0);
7918 case FIXED_CONVERT_EXPR:
7919 tem = fold_convert_const (code, type, arg0);
7920 return tem ? tem : NULL_TREE;
7922 case VIEW_CONVERT_EXPR:
7923 if (TREE_TYPE (op0) == type)
7925 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7926 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7927 type, TREE_OPERAND (op0, 0));
7928 if (TREE_CODE (op0) == MEM_REF)
7929 return fold_build2_loc (loc, MEM_REF, type,
7930 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7932 /* For integral conversions with the same precision or pointer
7933 conversions use a NOP_EXPR instead. */
7934 if ((INTEGRAL_TYPE_P (type)
7935 || POINTER_TYPE_P (type))
7936 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7937 || POINTER_TYPE_P (TREE_TYPE (op0)))
7938 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7939 return fold_convert_loc (loc, type, op0);
7941 /* Strip inner integral conversions that do not change the precision. */
7942 if (CONVERT_EXPR_P (op0)
7943 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7944 || POINTER_TYPE_P (TREE_TYPE (op0)))
7945 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7946 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7947 && (TYPE_PRECISION (TREE_TYPE (op0))
7948 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7949 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7950 type, TREE_OPERAND (op0, 0));
7952 return fold_view_convert_expr (type, op0);
7955 tem = fold_negate_expr (loc, arg0);
7957 return fold_convert_loc (loc, type, tem);
7961 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7962 return fold_abs_const (arg0, type);
7963 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7964 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7965 /* Convert fabs((double)float) into (double)fabsf(float). */
7966 else if (TREE_CODE (arg0) == NOP_EXPR
7967 && TREE_CODE (type) == REAL_TYPE)
7969 tree targ0 = strip_float_extensions (arg0);
7971 return fold_convert_loc (loc, type,
7972 fold_build1_loc (loc, ABS_EXPR,
7976 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7977 else if (TREE_CODE (arg0) == ABS_EXPR)
7979 else if (tree_expr_nonnegative_p (arg0))
7982 /* Strip sign ops from argument. */
7983 if (TREE_CODE (type) == REAL_TYPE)
7985 tem = fold_strip_sign_ops (arg0);
7987 return fold_build1_loc (loc, ABS_EXPR, type,
7988 fold_convert_loc (loc, type, tem));
7993 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7994 return fold_convert_loc (loc, type, arg0);
7995 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7997 tree itype = TREE_TYPE (type);
7998 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
7999 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8000 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8001 negate_expr (ipart));
8003 if (TREE_CODE (arg0) == COMPLEX_CST)
8005 tree itype = TREE_TYPE (type);
8006 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8007 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8008 return build_complex (type, rpart, negate_expr (ipart));
8010 if (TREE_CODE (arg0) == CONJ_EXPR)
8011 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8015 if (TREE_CODE (arg0) == INTEGER_CST)
8016 return fold_not_const (arg0, type);
8017 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8018 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8019 /* Convert ~ (-A) to A - 1. */
8020 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8021 return fold_build2_loc (loc, MINUS_EXPR, type,
8022 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8023 build_int_cst (type, 1));
8024 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8025 else if (INTEGRAL_TYPE_P (type)
8026 && ((TREE_CODE (arg0) == MINUS_EXPR
8027 && integer_onep (TREE_OPERAND (arg0, 1)))
8028 || (TREE_CODE (arg0) == PLUS_EXPR
8029 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8030 return fold_build1_loc (loc, NEGATE_EXPR, type,
8031 fold_convert_loc (loc, type,
8032 TREE_OPERAND (arg0, 0)));
8033 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8034 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8035 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8036 fold_convert_loc (loc, type,
8037 TREE_OPERAND (arg0, 0)))))
8038 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8039 fold_convert_loc (loc, type,
8040 TREE_OPERAND (arg0, 1)));
8041 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8042 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8043 fold_convert_loc (loc, type,
8044 TREE_OPERAND (arg0, 1)))))
8045 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8046 fold_convert_loc (loc, type,
8047 TREE_OPERAND (arg0, 0)), tem);
8048 /* Perform BIT_NOT_EXPR on each element individually. */
8049 else if (TREE_CODE (arg0) == VECTOR_CST)
8051 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8052 int count = TYPE_VECTOR_SUBPARTS (type), i;
8054 for (i = 0; i < count; i++)
8058 elem = TREE_VALUE (elements);
8059 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8060 if (elem == NULL_TREE)
8062 elements = TREE_CHAIN (elements);
8065 elem = build_int_cst (TREE_TYPE (type), -1);
8066 list = tree_cons (NULL_TREE, elem, list);
8069 return build_vector (type, nreverse (list));
8074 case TRUTH_NOT_EXPR:
8075 /* The argument to invert_truthvalue must have Boolean type. */
8076 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8077 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8079 /* Note that the operand of this must be an int
8080 and its values must be 0 or 1.
8081 ("true" is a fixed value perhaps depending on the language,
8082 but we don't handle values other than 1 correctly yet.) */
8083 tem = fold_truth_not_expr (loc, arg0);
8086 return fold_convert_loc (loc, type, tem);
8089 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8090 return fold_convert_loc (loc, type, arg0);
8091 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8092 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8093 TREE_OPERAND (arg0, 1));
8094 if (TREE_CODE (arg0) == COMPLEX_CST)
8095 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8096 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8098 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8099 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8100 fold_build1_loc (loc, REALPART_EXPR, itype,
8101 TREE_OPERAND (arg0, 0)),
8102 fold_build1_loc (loc, REALPART_EXPR, itype,
8103 TREE_OPERAND (arg0, 1)));
8104 return fold_convert_loc (loc, type, tem);
8106 if (TREE_CODE (arg0) == CONJ_EXPR)
8108 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8109 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8110 TREE_OPERAND (arg0, 0));
8111 return fold_convert_loc (loc, type, tem);
8113 if (TREE_CODE (arg0) == CALL_EXPR)
8115 tree fn = get_callee_fndecl (arg0);
8116 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8117 switch (DECL_FUNCTION_CODE (fn))
8119 CASE_FLT_FN (BUILT_IN_CEXPI):
8120 fn = mathfn_built_in (type, BUILT_IN_COS);
8122 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8132 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8133 return build_zero_cst (type);
8134 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8135 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8136 TREE_OPERAND (arg0, 0));
8137 if (TREE_CODE (arg0) == COMPLEX_CST)
8138 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8139 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8141 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8142 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8143 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8144 TREE_OPERAND (arg0, 0)),
8145 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8146 TREE_OPERAND (arg0, 1)));
8147 return fold_convert_loc (loc, type, tem);
8149 if (TREE_CODE (arg0) == CONJ_EXPR)
8151 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8152 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8153 return fold_convert_loc (loc, type, negate_expr (tem));
8155 if (TREE_CODE (arg0) == CALL_EXPR)
8157 tree fn = get_callee_fndecl (arg0);
8158 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8159 switch (DECL_FUNCTION_CODE (fn))
8161 CASE_FLT_FN (BUILT_IN_CEXPI):
8162 fn = mathfn_built_in (type, BUILT_IN_SIN);
8164 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
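/* Example (hedged, not part of the original source): since
   __builtin_cexpi (x) computes cos (x) + i*sin (x), the folds above give

     __real__ __builtin_cexpi (x)    ->    __builtin_cos (x)
     __imag__ __builtin_cexpi (x)    ->    __builtin_sin (x)

   so only the needed half of the complex exponential is computed.  */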
8174 /* Fold *&X to X if X is an lvalue. */
8175 if (TREE_CODE (op0) == ADDR_EXPR)
8177 tree op00 = TREE_OPERAND (op0, 0);
8178 if ((TREE_CODE (op00) == VAR_DECL
8179 || TREE_CODE (op00) == PARM_DECL
8180 || TREE_CODE (op00) == RESULT_DECL)
8181 && !TREE_READONLY (op00))
8188 } /* switch (code) */
8192 /* If the operation was a conversion do _not_ mark a resulting constant
8193 with TREE_OVERFLOW if the original constant was not. These conversions
8194 have implementation defined behavior and retaining the TREE_OVERFLOW
8195 flag here would confuse later passes such as VRP. */
8197 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8198 tree type, tree op0)
8200 tree res = fold_unary_loc (loc, code, type, op0);
8202 && TREE_CODE (res) == INTEGER_CST
8203 && TREE_CODE (op0) == INTEGER_CST
8204 && CONVERT_EXPR_CODE_P (code))
8205 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8210 /* Fold a binary expression of code CODE and type TYPE with operands
8211 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8212 Return the folded expression if folding is successful. Otherwise,
8213 return NULL_TREE. */
8216 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8218 enum tree_code compl_code;
8220 if (code == MIN_EXPR)
8221 compl_code = MAX_EXPR;
8222 else if (code == MAX_EXPR)
8223 compl_code = MIN_EXPR;
8227 /* MIN (MAX (a, b), b) == b. */
8228 if (TREE_CODE (op0) == compl_code
8229 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8230 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8232 /* MIN (MAX (b, a), b) == b. */
8233 if (TREE_CODE (op0) == compl_code
8234 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8235 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8236 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8238 /* MIN (a, MAX (a, b)) == a. */
8239 if (TREE_CODE (op1) == compl_code
8240 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8241 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8242 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8244 /* MIN (a, MAX (b, a)) == a. */
8245 if (TREE_CODE (op1) == compl_code
8246 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8247 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8248 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
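/* Illustration (a hedged sketch, not from the original source): the
   source-level shape of the folds above, using plain ints:

     int t = a > b ? a : b;        MAX (a, b)
     r = t < b ? t : b;            MIN (MAX (a, b), b) -- folds to b

   omit_one_operand_loc keeps any side effects of the discarded operand
   while returning the simplified value.  */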
8253 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8254 by changing CODE to reduce the magnitude of constants involved in
8255 ARG0 of the comparison.
8256 Returns a canonicalized comparison tree if a simplification was
8257 possible, otherwise returns NULL_TREE.
8258 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8259 valid if signed overflow is undefined. */
8262 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8263 tree arg0, tree arg1,
8264 bool *strict_overflow_p)
8266 enum tree_code code0 = TREE_CODE (arg0);
8267 tree t, cst0 = NULL_TREE;
8271 /* Match A +- CST code arg1 and CST code arg1. We can change the
8272 first form only if overflow is undefined. */
8273 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8274 /* In principle pointers also have undefined overflow behavior,
8275 but that causes problems elsewhere. */
8276 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8277 && (code0 == MINUS_EXPR
8278 || code0 == PLUS_EXPR)
8279 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8280 || code0 == INTEGER_CST))
8283 /* Identify the constant in arg0 and its sign. */
8284 if (code0 == INTEGER_CST)
8287 cst0 = TREE_OPERAND (arg0, 1);
8288 sgn0 = tree_int_cst_sgn (cst0);
8290 /* Overflowed constants and zero will cause problems. */
8291 if (integer_zerop (cst0)
8292 || TREE_OVERFLOW (cst0))
8295 /* See if we can reduce the magnitude of the constant in
8296 arg0 by changing the comparison code. */
8297 if (code0 == INTEGER_CST)
8299 /* CST <= arg1 -> CST-1 < arg1. */
8300 if (code == LE_EXPR && sgn0 == 1)
8302 /* -CST < arg1 -> -CST-1 <= arg1. */
8303 else if (code == LT_EXPR && sgn0 == -1)
8305 /* CST > arg1 -> CST-1 >= arg1. */
8306 else if (code == GT_EXPR && sgn0 == 1)
8308 /* -CST >= arg1 -> -CST-1 > arg1. */
8309 else if (code == GE_EXPR && sgn0 == -1)
8313 /* arg1 code' CST' might be more canonical. */
8318 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8320 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8322 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8323 else if (code == GT_EXPR
8324 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8326 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8327 else if (code == LE_EXPR
8328 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8330 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8331 else if (code == GE_EXPR
8332 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8336 *strict_overflow_p = true;
8339 /* Now build the constant reduced in magnitude. But not if that
8340 would produce one outside of its type's range. */
8341 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8343 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8344 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8346 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8347 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8348 /* We cannot swap the comparison here as that would cause us to
8349 endlessly recurse. */
8352 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8353 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8354 if (code0 != INTEGER_CST)
8355 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8357 /* If swapping might yield a more canonical form, do so. */
8359 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8361 return fold_build2_loc (loc, code, type, t, arg1);
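/* Example (illustrative assumptions, not from the original source): with
   signed int x and y under -fstrict-overflow semantics,

     x + 3 > y    canonicalizes to    x + 2 >= y
     2 <= y       canonicalizes to    1 < y

   In the first form the constant in arg0 shrinks in magnitude and
   *strict_overflow_p is set; the second form needs no overflow
   assumption because the constant stands alone.  */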
8364 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8365 overflow further. Try to decrease the magnitude of constants involved
8366 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8367 and put sole constants at the second argument position.
8368 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8371 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8372 tree arg0, tree arg1)
8375 bool strict_overflow_p;
8376 const char * const warnmsg = G_("assuming signed overflow does not occur "
8377 "when reducing constant in comparison");
8379 /* Try canonicalization by simplifying arg0. */
8380 strict_overflow_p = false;
8381 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8382 &strict_overflow_p);
8385 if (strict_overflow_p)
8386 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8390 /* Try canonicalization by simplifying arg1 using the swapped
8392 code = swap_tree_comparison (code);
8393 strict_overflow_p = false;
8394 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8395 &strict_overflow_p);
8396 if (t && strict_overflow_p)
8397 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8401 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8402 space. This is used to avoid issuing overflow warnings for
8403 expressions like &p->x which cannot wrap. */
8406 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8408 unsigned HOST_WIDE_INT offset_low, total_low;
8409 HOST_WIDE_INT size, offset_high, total_high;
8411 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8417 if (offset == NULL_TREE)
8422 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8426 offset_low = TREE_INT_CST_LOW (offset);
8427 offset_high = TREE_INT_CST_HIGH (offset);
8430 if (add_double_with_sign (offset_low, offset_high,
8431 bitpos / BITS_PER_UNIT, 0,
8432 &total_low, &total_high,
8436 if (total_high != 0)
8439 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8443 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an array. */
8445 if (TREE_CODE (base) == ADDR_EXPR)
8447 HOST_WIDE_INT base_size;
8449 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8450 if (base_size > 0 && size < base_size)
8454 return total_low > (unsigned HOST_WIDE_INT) size;
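/* Example (hedged, not part of the original source): given

     struct s { int a; int b; } *p;

   the address &p->b has base p, no variable offset, and bitpos 32 on a
   typical target, i.e. a 4 byte total offset. 4 does not exceed the
   8 byte size of struct s, so the function reports that the address
   cannot wrap and no spurious overflow warning is emitted.  */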
8457 /* Subroutine of fold_binary. This routine performs all of the
8458 transformations that are common to the equality/inequality
8459 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8460 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8461 fold_binary should call fold_binary instead. Fold a comparison with
8462 tree code CODE and type TYPE with operands OP0 and OP1. Return
8463 the folded comparison or NULL_TREE. */
8466 fold_comparison (location_t loc, enum tree_code code, tree type,
8469 tree arg0, arg1, tem;
8474 STRIP_SIGN_NOPS (arg0);
8475 STRIP_SIGN_NOPS (arg1);
8477 tem = fold_relational_const (code, type, arg0, arg1);
8478 if (tem != NULL_TREE)
8481 /* If one arg is a real or integer constant, put it last. */
8482 if (tree_swap_operands_p (arg0, arg1, true))
8483 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8485 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8486 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8487 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8488 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8489 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8490 && (TREE_CODE (arg1) == INTEGER_CST
8491 && !TREE_OVERFLOW (arg1)))
8493 tree const1 = TREE_OPERAND (arg0, 1);
8495 tree variable = TREE_OPERAND (arg0, 0);
8498 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8500 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8501 TREE_TYPE (arg1), const2, const1);
8503 /* If the constant operation overflowed this can be
8504 simplified as a comparison against INT_MAX/INT_MIN. */
8505 if (TREE_CODE (lhs) == INTEGER_CST
8506 && TREE_OVERFLOW (lhs))
8508 int const1_sgn = tree_int_cst_sgn (const1);
8509 enum tree_code code2 = code;
8511 /* Get the sign of the constant on the lhs if the
8512 operation were VARIABLE + CONST1. */
8513 if (TREE_CODE (arg0) == MINUS_EXPR)
8514 const1_sgn = -const1_sgn;
8516 /* The sign of the constant determines if we overflowed
8517 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8518 Canonicalize to the INT_MIN overflow by swapping the comparison if necessary. */
8520 if (const1_sgn == -1)
8521 code2 = swap_tree_comparison (code);
8523 /* We now can look at the canonicalized case
8524 VARIABLE + 1 CODE2 INT_MIN
8525 and decide on the result. */
8526 if (code2 == LT_EXPR
8528 || code2 == EQ_EXPR)
8529 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8530 else if (code2 == NE_EXPR
8532 || code2 == GT_EXPR)
8533 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8536 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8537 && (TREE_CODE (lhs) != INTEGER_CST
8538 || !TREE_OVERFLOW (lhs)))
8540 if (code != EQ_EXPR && code != NE_EXPR)
8541 fold_overflow_warning ("assuming signed overflow does not occur "
8542 "when changing X +- C1 cmp C2 to "
8544 WARN_STRICT_OVERFLOW_COMPARISON);
8545 return fold_build2_loc (loc, code, type, variable, lhs);
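/* Example (illustrative, not from the original source): with signed int x,

     x + 2 < 5     folds to    x < 3      (C2 - C1)
     x - 4 == 6    folds to    x == 10    (C2 + C1)

   and when the recomputed constant overflows, e.g. x - 1 <= INT_MAX,
   the comparison is decided outright (here: always true, since with
   undefined overflow x - 1 can never exceed INT_MAX).  */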
8549 /* For comparisons of pointers we can decompose it to a compile time
8550 comparison of the base objects and the offsets into the object.
8551 This requires at least one operand being an ADDR_EXPR or a
8552 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8553 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8554 && (TREE_CODE (arg0) == ADDR_EXPR
8555 || TREE_CODE (arg1) == ADDR_EXPR
8556 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8557 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8559 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8560 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8561 enum machine_mode mode;
8562 int volatilep, unsignedp;
8563 bool indirect_base0 = false, indirect_base1 = false;
8565 /* Get base and offset for the access. Strip ADDR_EXPR for
8566 get_inner_reference, but put it back by stripping INDIRECT_REF
8567 off the base object if possible. indirect_baseN will be true
8568 if baseN is not an address but refers to the object itself. */
8570 if (TREE_CODE (arg0) == ADDR_EXPR)
8572 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8573 &bitsize, &bitpos0, &offset0, &mode,
8574 &unsignedp, &volatilep, false);
8575 if (TREE_CODE (base0) == INDIRECT_REF)
8576 base0 = TREE_OPERAND (base0, 0);
8578 indirect_base0 = true;
8580 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8582 base0 = TREE_OPERAND (arg0, 0);
8583 STRIP_SIGN_NOPS (base0);
8584 if (TREE_CODE (base0) == ADDR_EXPR)
8586 base0 = TREE_OPERAND (base0, 0);
8587 indirect_base0 = true;
8589 offset0 = TREE_OPERAND (arg0, 1);
8593 if (TREE_CODE (arg1) == ADDR_EXPR)
8595 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8596 &bitsize, &bitpos1, &offset1, &mode,
8597 &unsignedp, &volatilep, false);
8598 if (TREE_CODE (base1) == INDIRECT_REF)
8599 base1 = TREE_OPERAND (base1, 0);
8601 indirect_base1 = true;
8603 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8605 base1 = TREE_OPERAND (arg1, 0);
8606 STRIP_SIGN_NOPS (base1);
8607 if (TREE_CODE (base1) == ADDR_EXPR)
8609 base1 = TREE_OPERAND (base1, 0);
8610 indirect_base1 = true;
8612 offset1 = TREE_OPERAND (arg1, 1);
8615 /* A local variable can never be pointed to by
8616 the default SSA name of an incoming parameter. */
8617 if ((TREE_CODE (arg0) == ADDR_EXPR
8619 && TREE_CODE (base0) == VAR_DECL
8620 && auto_var_in_fn_p (base0, current_function_decl)
8622 && TREE_CODE (base1) == SSA_NAME
8623 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8624 && SSA_NAME_IS_DEFAULT_DEF (base1))
8625 || (TREE_CODE (arg1) == ADDR_EXPR
8627 && TREE_CODE (base1) == VAR_DECL
8628 && auto_var_in_fn_p (base1, current_function_decl)
8630 && TREE_CODE (base0) == SSA_NAME
8631 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8632 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8634 if (code == NE_EXPR)
8635 return constant_boolean_node (1, type);
8636 else if (code == EQ_EXPR)
8637 return constant_boolean_node (0, type);
8639 /* If we have equivalent bases we might be able to simplify. */
8640 else if (indirect_base0 == indirect_base1
8641 && operand_equal_p (base0, base1, 0))
8643 /* We can fold this expression to a constant if the non-constant
8644 offset parts are equal. */
8645 if ((offset0 == offset1
8646 || (offset0 && offset1
8647 && operand_equal_p (offset0, offset1, 0)))
8650 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8655 && bitpos0 != bitpos1
8656 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8657 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8658 fold_overflow_warning (("assuming pointer wraparound does not "
8659 "occur when comparing P +- C1 with "
8661 WARN_STRICT_OVERFLOW_CONDITIONAL);
8666 return constant_boolean_node (bitpos0 == bitpos1, type);
8668 return constant_boolean_node (bitpos0 != bitpos1, type);
8670 return constant_boolean_node (bitpos0 < bitpos1, type);
8672 return constant_boolean_node (bitpos0 <= bitpos1, type);
8674 return constant_boolean_node (bitpos0 >= bitpos1, type);
8676 return constant_boolean_node (bitpos0 > bitpos1, type);
8680 /* We can simplify the comparison to a comparison of the variable
8681 offset parts if the constant offset parts are equal.
8682 Be careful to use signed size type here because otherwise we
8683 mess with array offsets in the wrong way. This is possible
8684 because pointer arithmetic is restricted to remain within an
8685 object and overflow on pointer differences is undefined as of
8686 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8687 else if (bitpos0 == bitpos1
8688 && ((code == EQ_EXPR || code == NE_EXPR)
8689 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8691 /* By converting to signed size type we cover middle-end pointer
8692 arithmetic which operates on unsigned pointer types of size
8693 type size and ARRAY_REF offsets which are properly sign or
8694 zero extended from their type in case it is narrower than sizetype. */
8696 if (offset0 == NULL_TREE)
8697 offset0 = build_int_cst (ssizetype, 0);
8699 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8700 if (offset1 == NULL_TREE)
8701 offset1 = build_int_cst (ssizetype, 0);
8703 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8707 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8708 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8709 fold_overflow_warning (("assuming pointer wraparound does not "
8710 "occur when comparing P +- C1 with "
8712 WARN_STRICT_OVERFLOW_COMPARISON);
8714 return fold_build2_loc (loc, code, type, offset0, offset1);
8717 /* For non-equal bases we can simplify if they are addresses
8718 of local binding decls or constants. */
8719 else if (indirect_base0 && indirect_base1
8720 /* We know that !operand_equal_p (base0, base1, 0)
8721 because the if condition was false. But make
8722 sure two decls are not the same. */
8724 && TREE_CODE (arg0) == ADDR_EXPR
8725 && TREE_CODE (arg1) == ADDR_EXPR
8726 && (((TREE_CODE (base0) == VAR_DECL
8727 || TREE_CODE (base0) == PARM_DECL)
8728 && (targetm.binds_local_p (base0)
8729 || CONSTANT_CLASS_P (base1)))
8730 || CONSTANT_CLASS_P (base0))
8731 && (((TREE_CODE (base1) == VAR_DECL
8732 || TREE_CODE (base1) == PARM_DECL)
8733 && (targetm.binds_local_p (base1)
8734 || CONSTANT_CLASS_P (base0)))
8735 || CONSTANT_CLASS_P (base1)))
8737 if (code == EQ_EXPR)
8738 return omit_two_operands_loc (loc, type, boolean_false_node,
8740 else if (code == NE_EXPR)
8741 return omit_two_operands_loc (loc, type, boolean_true_node,
8744 /* For equal offsets we can simplify to a comparison of the base addresses. */
8746 else if (bitpos0 == bitpos1
8748 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8750 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8751 && ((offset0 == offset1)
8752 || (offset0 && offset1
8753 && operand_equal_p (offset0, offset1, 0))))
8756 base0 = build_fold_addr_expr_loc (loc, base0);
8758 base1 = build_fold_addr_expr_loc (loc, base1);
8759 return fold_build2_loc (loc, code, type, base0, base1);
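/* Example (hedged sketch, not part of the original source): given

     int a[8];

   the comparison &a[1] < &a[3] has equal bases and constant byte
   offsets 4 and 12, so it folds to constant true (pointer overflow
   being undefined); if the offsets were variable but equal, the fold
   would reduce to a comparison of the offsets or of the base
   addresses instead.  */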
8763 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8764 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8765 the resulting offset is smaller in absolute value than the original one. */
8767 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8768 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8769 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8770 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8771 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8772 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8773 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8775 tree const1 = TREE_OPERAND (arg0, 1);
8776 tree const2 = TREE_OPERAND (arg1, 1);
8777 tree variable1 = TREE_OPERAND (arg0, 0);
8778 tree variable2 = TREE_OPERAND (arg1, 0);
8780 const char * const warnmsg = G_("assuming signed overflow does not "
8781 "occur when combining constants around "
8784 /* Put the constant on the side where it doesn't overflow and is
8785 of lower absolute value than before. */
8786 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8787 ? MINUS_EXPR : PLUS_EXPR,
8789 if (!TREE_OVERFLOW (cst)
8790 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8792 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8793 return fold_build2_loc (loc, code, type,
8795 fold_build2_loc (loc,
8796 TREE_CODE (arg1), TREE_TYPE (arg1),
8800 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8801 ? MINUS_EXPR : PLUS_EXPR,
8803 if (!TREE_OVERFLOW (cst)
8804 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8806 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8807 return fold_build2_loc (loc, code, type,
8808 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8814 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8815 signed arithmetic case. That form is created by the compiler
8816 often enough for folding it to be of value. One example is in
8817 computing loop trip counts after Operator Strength Reduction. */
8818 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8819 && TREE_CODE (arg0) == MULT_EXPR
8820 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8821 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8822 && integer_zerop (arg1))
8824 tree const1 = TREE_OPERAND (arg0, 1);
8825 tree const2 = arg1; /* zero */
8826 tree variable1 = TREE_OPERAND (arg0, 0);
8827 enum tree_code cmp_code = code;
8829 /* Handle unfolded multiplication by zero. */
8830 if (integer_zerop (const1))
8831 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8833 fold_overflow_warning (("assuming signed overflow does not occur when "
8834 "eliminating multiplication in comparison "
8836 WARN_STRICT_OVERFLOW_COMPARISON);
8838 /* If const1 is negative we swap the sense of the comparison. */
8839 if (tree_int_cst_sgn (const1) < 0)
8840 cmp_code = swap_tree_comparison (cmp_code);
8842 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
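/* Example (illustrative, not from the original source): with signed
   int x and undefined overflow,

     x * 4 > 0     folds to    x > 0
     x * -2 > 0    folds to    x < 0    (negative C1 swaps the sense)

   exactly the shape left behind when strength reduction rewrites a
   loop trip-count computation.  */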
8845 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
8849 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8851 tree targ0 = strip_float_extensions (arg0);
8852 tree targ1 = strip_float_extensions (arg1);
8853 tree newtype = TREE_TYPE (targ0);
8855 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8856 newtype = TREE_TYPE (targ1);
8858 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8859 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8860 return fold_build2_loc (loc, code, type,
8861 fold_convert_loc (loc, newtype, targ0),
8862 fold_convert_loc (loc, newtype, targ1));
8864 /* (-a) CMP (-b) -> b CMP a */
8865 if (TREE_CODE (arg0) == NEGATE_EXPR
8866 && TREE_CODE (arg1) == NEGATE_EXPR)
8867 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8868 TREE_OPERAND (arg0, 0));
8870 if (TREE_CODE (arg1) == REAL_CST)
8872 REAL_VALUE_TYPE cst;
8873 cst = TREE_REAL_CST (arg1);
8875 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8876 if (TREE_CODE (arg0) == NEGATE_EXPR)
8877 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8878 TREE_OPERAND (arg0, 0),
8879 build_real (TREE_TYPE (arg1),
8880 real_value_negate (&cst)));
8882 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8883 /* a CMP (-0) -> a CMP 0 */
8884 if (REAL_VALUE_MINUS_ZERO (cst))
8885 return fold_build2_loc (loc, code, type, arg0,
8886 build_real (TREE_TYPE (arg1), dconst0));
8888 /* x != NaN is always true, other ops are always false. */
8889 if (REAL_VALUE_ISNAN (cst)
8890 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8892 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8893 return omit_one_operand_loc (loc, type, tem, arg0);
8896 /* Fold comparisons against infinity. */
8897 if (REAL_VALUE_ISINF (cst)
8898 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
8900 tem = fold_inf_compare (loc, code, type, arg0, arg1);
8901 if (tem != NULL_TREE)
8906 /* If this is a comparison of a real constant with a PLUS_EXPR
8907 or a MINUS_EXPR of a real constant, we can convert it into a
8908 comparison with a revised real constant as long as no overflow
8909 occurs when unsafe_math_optimizations are enabled. */
8910 if (flag_unsafe_math_optimizations
8911 && TREE_CODE (arg1) == REAL_CST
8912 && (TREE_CODE (arg0) == PLUS_EXPR
8913 || TREE_CODE (arg0) == MINUS_EXPR)
8914 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8915 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8916 ? MINUS_EXPR : PLUS_EXPR,
8917 arg1, TREE_OPERAND (arg0, 1)))
8918 && !TREE_OVERFLOW (tem))
8919 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8921 /* Likewise, we can simplify a comparison of a real constant with
8922 a MINUS_EXPR whose first operand is also a real constant, i.e.
8923 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8924 floating-point types only if -fassociative-math is set. */
8925 if (flag_associative_math
8926 && TREE_CODE (arg1) == REAL_CST
8927 && TREE_CODE (arg0) == MINUS_EXPR
8928 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8929 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8931 && !TREE_OVERFLOW (tem))
8932 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8933 TREE_OPERAND (arg0, 1), tem);
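/* Example (hedged, assuming -fassociative-math; not part of the
   original source):

     10.0 - x < 4.0    becomes    x > 6.0

   i.e. (c1 - x) < c2 is rewritten as x > c1 - c2 with the comparison
   direction swapped.  */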
8935 /* Fold comparisons against built-in math functions. */
8936 if (TREE_CODE (arg1) == REAL_CST
8937 && flag_unsafe_math_optimizations
8938 && ! flag_errno_math)
8940 enum built_in_function fcode = builtin_mathfn_code (arg0);
8942 if (fcode != END_BUILTINS)
8944 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
8945 if (tem != NULL_TREE)
8951 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8952 && CONVERT_EXPR_P (arg0))
8954 /* If we are widening one operand of an integer comparison,
8955 see if the other operand is similarly being widened. Perhaps we
8956 can do the comparison in the narrower type. */
8957 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
8961 /* Or if we are changing signedness. */
8962 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
8967 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8968 constant, we can simplify it. */
8969 if (TREE_CODE (arg1) == INTEGER_CST
8970 && (TREE_CODE (arg0) == MIN_EXPR
8971 || TREE_CODE (arg0) == MAX_EXPR)
8972 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8974 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8979 /* Simplify comparison of something with itself. (For IEEE
8980 floating-point, we can only do some of these simplifications.) */
8981 if (operand_equal_p (arg0, arg1, 0))
8986 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8987 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8988 return constant_boolean_node (1, type);
8993 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8994 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8995 return constant_boolean_node (1, type);
8996 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
8999 /* For NE, we can only do this simplification if the operands are
9000 integers or we don't honor IEEE floating-point NaNs. */
9001 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9002 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9004 /* ... fall through ... */
9007 return constant_boolean_node (0, type);
9013 /* If we are comparing an expression that just has comparisons
9014 of two integer values, arithmetic expressions of those comparisons,
9015 and constants, we can simplify it. There are only three cases
9016 to check: the two values can either be equal, the first can be
9017 greater, or the second can be greater. Fold the expression for
9018 those three values. Since each value must be 0 or 1, we have
9019 eight possibilities, each of which corresponds to the constant 0
9020 or 1 or one of the six possible comparisons.
9022 This handles common cases like (a > b) == 0 but also handles
9023 expressions like ((x > y) - (y > x)) > 0, which supposedly
9024 occur in macroized code. */
9026 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9028 tree cval1 = 0, cval2 = 0;
9031 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9032 /* Don't handle degenerate cases here; they should already
9033 have been handled anyway. */
9034 && cval1 != 0 && cval2 != 0
9035 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9036 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9037 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9038 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9039 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9040 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9041 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9043 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9044 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9046 /* We can't just pass T to eval_subst in case cval1 or cval2
9047 was the same as ARG1. */
9050 = fold_build2_loc (loc, code, type,
9051 eval_subst (loc, arg0, cval1, maxval,
9055 = fold_build2_loc (loc, code, type,
9056 eval_subst (loc, arg0, cval1, maxval,
9060 = fold_build2_loc (loc, code, type,
9061 eval_subst (loc, arg0, cval1, minval,
9065 /* All three of these results should be 0 or 1. Confirm they are.
9066 Then use those values to select the proper code to use. */
9068 if (TREE_CODE (high_result) == INTEGER_CST
9069 && TREE_CODE (equal_result) == INTEGER_CST
9070 && TREE_CODE (low_result) == INTEGER_CST)
9072 /* Make a 3-bit mask with the high-order bit being the
9073 value for `>', the next for '=', and the low for '<'. */
9074 switch ((integer_onep (high_result) * 4)
9075 + (integer_onep (equal_result) * 2)
9076 + integer_onep (low_result))
9080 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9101 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9106 tem = save_expr (build2 (code, type, cval1, cval2));
9107 SET_EXPR_LOCATION (tem, loc);
9110 return fold_build2_loc (loc, code, type, cval1, cval2);
9115 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9116 into a single range test. */
9117 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9118 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9119 && TREE_CODE (arg1) == INTEGER_CST
9120 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9121 && !integer_zerop (TREE_OPERAND (arg0, 1))
9122 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9123 && !TREE_OVERFLOW (arg1))
9125 tem = fold_div_compare (loc, code, type, arg0, arg1);
9126 if (tem != NULL_TREE)
9130 /* Fold ~X op ~Y as Y op X. */
9131 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9132 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9134 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9135 return fold_build2_loc (loc, code, type,
9136 fold_convert_loc (loc, cmp_type,
9137 TREE_OPERAND (arg1, 0)),
9138 TREE_OPERAND (arg0, 0));
9141 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9142 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9143 && TREE_CODE (arg1) == INTEGER_CST)
9145 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9146 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9147 TREE_OPERAND (arg0, 0),
9148 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9149 fold_convert_loc (loc, cmp_type, arg1)));
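/* Example (illustrative, not from the original source): for integer
   x and y,

     ~x < ~y     folds to    y < x
     ~x == 5     folds to    x == ~5, i.e. x == -6

   both valid because bitwise NOT is an order-reversing bijection on
   the underlying integer range.  */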
9156 /* Subroutine of fold_binary. Optimize complex multiplications of the
9157 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9158 argument EXPR represents the expression "z" of type TYPE. */
9161 fold_mult_zconjz (location_t loc, tree type, tree expr)
9163 tree itype = TREE_TYPE (type);
9164 tree rpart, ipart, tem;
9166 if (TREE_CODE (expr) == COMPLEX_EXPR)
9168 rpart = TREE_OPERAND (expr, 0);
9169 ipart = TREE_OPERAND (expr, 1);
9171 else if (TREE_CODE (expr) == COMPLEX_CST)
9173 rpart = TREE_REALPART (expr);
9174 ipart = TREE_IMAGPART (expr);
9178 expr = save_expr (expr);
9179 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9180 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9183 rpart = save_expr (rpart);
9184 ipart = save_expr (ipart);
9185 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9186 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9187 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9188 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9189 build_zero_cst (itype));
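/* Example (hedged sketch, not part of the original source): for
   _Complex double z,

     z * conj (z)

   folds to

     __real z * __real z + __imag z * __imag z    (+ 0.0i)

   computing |z|^2 with two multiplies and one add instead of a full
   complex multiplication.  */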
9193 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9194 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9195 guarantees that P and N have the same least significant log2(M) bits.
9196 N is not otherwise constrained. In particular, N is not normalized to
9197 0 <= N < M as is common. In general, the precise value of P is unknown.
9198 M is chosen as large as possible such that constant N can be determined.
9200 Returns M and sets *RESIDUE to N.
9202 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9203 account. This is not always possible due to PR 35705.
9206 static unsigned HOST_WIDE_INT
9207 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9208 bool allow_func_align)
9210 enum tree_code code;
9214 code = TREE_CODE (expr);
9215 if (code == ADDR_EXPR)
9217 unsigned int bitalign;
9218 bitalign = get_object_alignment_1 (TREE_OPERAND (expr, 0), residue);
9219 *residue /= BITS_PER_UNIT;
9220 return bitalign / BITS_PER_UNIT;
9222 else if (code == POINTER_PLUS_EXPR)
9225 unsigned HOST_WIDE_INT modulus;
9226 enum tree_code inner_code;
9228 op0 = TREE_OPERAND (expr, 0);
9230 modulus = get_pointer_modulus_and_residue (op0, residue,
9233 op1 = TREE_OPERAND (expr, 1);
9235 inner_code = TREE_CODE (op1);
9236 if (inner_code == INTEGER_CST)
9238 *residue += TREE_INT_CST_LOW (op1);
9241 else if (inner_code == MULT_EXPR)
9243 op1 = TREE_OPERAND (op1, 1);
9244 if (TREE_CODE (op1) == INTEGER_CST)
9246 unsigned HOST_WIDE_INT align;
9248 /* Compute the greatest power-of-2 divisor of op1. */
9249 align = TREE_INT_CST_LOW (op1);
9252 /* If align is non-zero and less than *modulus, replace
9253 *modulus with align. If align is 0, then either op1 is 0
9254 or the greatest power-of-2 divisor of op1 doesn't fit in an
9255 unsigned HOST_WIDE_INT. In either case, no additional
9256 constraint is imposed. */
9258 modulus = MIN (modulus, align);
9265 /* If we get here, we were unable to determine anything useful about the expression. */
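/* Example (illustrative, not from the original source): for

     char buf[32] __attribute__ ((aligned (16)));

   the expression buf + 4 yields modulus 16 and residue 4: the
   ADDR_EXPR contributes modulus 16 and residue 0, and the constant in
   the POINTER_PLUS_EXPR adds 4 to the residue, so the low four bits
   of the pointer value are known to be 4.  */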
9271 /* Fold a binary expression of code CODE and type TYPE with operands
9272 OP0 and OP1. LOC is the location of the resulting expression.
9273 Return the folded expression if folding is successful. Otherwise,
9274 return NULL_TREE. */
9277 fold_binary_loc (location_t loc,
9278 enum tree_code code, tree type, tree op0, tree op1)
9280 enum tree_code_class kind = TREE_CODE_CLASS (code);
9281 tree arg0, arg1, tem;
9282 tree t1 = NULL_TREE;
9283 bool strict_overflow_p;
9285 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9286 && TREE_CODE_LENGTH (code) == 2
9288 && op1 != NULL_TREE);
9293 /* Strip any conversions that don't change the mode. This is
9294 safe for every expression, except for a comparison expression
9295 because its signedness is derived from its operands. So, in
9296 the latter case, only strip conversions that don't change the
9297 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments preserved.
9300 Note that this is done as an internal manipulation within the
9301 constant folder, in order to find the simplest representation
9302 of the arguments so that their form can be studied. In any
9303 case, the appropriate type conversions should be put back in
9304 the tree that will get out of the constant folder. */
9306 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9308 STRIP_SIGN_NOPS (arg0);
9309 STRIP_SIGN_NOPS (arg1);
9317 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9318 constant but we can't do arithmetic on them. */
9319 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9320 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9321 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9322 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9323 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9324 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9326 if (kind == tcc_binary)
9328 /* Make sure type and arg0 have the same saturating flag. */
9329 gcc_assert (TYPE_SATURATING (type)
9330 == TYPE_SATURATING (TREE_TYPE (arg0)));
9331 tem = const_binop (code, arg0, arg1);
9333 else if (kind == tcc_comparison)
9334 tem = fold_relational_const (code, type, arg0, arg1);
9338 if (tem != NULL_TREE)
9340 if (TREE_TYPE (tem) != type)
9341 tem = fold_convert_loc (loc, type, tem);
9346 /* If this is a commutative operation, and ARG0 is a constant, move it
9347 to ARG1 to reduce the number of tests below. */
9348 if (commutative_tree_code (code)
9349 && tree_swap_operands_p (arg0, arg1, true))
9350 return fold_build2_loc (loc, code, type, op1, op0);
9352 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9354 First check for cases where an arithmetic operation is applied to a
9355 compound, conditional, or comparison operation. Push the arithmetic
9356 operation inside the compound or conditional to see if any folding
9357 can then be done. Convert comparison to conditional for this purpose.
9358 This also optimizes non-constant cases that used to be done in expand_expr.
9361 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9362 one of the operands is a comparison and the other is a comparison, a
9363 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9364 code below would make the expression more complex. Change it to a
9365 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9366 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9368 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9369 || code == EQ_EXPR || code == NE_EXPR)
9370 && ((truth_value_p (TREE_CODE (arg0))
9371 && (truth_value_p (TREE_CODE (arg1))
9372 || (TREE_CODE (arg1) == BIT_AND_EXPR
9373 && integer_onep (TREE_OPERAND (arg1, 1)))))
9374 || (truth_value_p (TREE_CODE (arg1))
9375 && (truth_value_p (TREE_CODE (arg0))
9376 || (TREE_CODE (arg0) == BIT_AND_EXPR
9377 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9379 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9380 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9383 fold_convert_loc (loc, boolean_type_node, arg0),
9384 fold_convert_loc (loc, boolean_type_node, arg1));
9386 if (code == EQ_EXPR)
9387 tem = invert_truthvalue_loc (loc, tem);
9389 return fold_convert_loc (loc, type, tem);
9392 if (TREE_CODE_CLASS (code) == tcc_binary
9393 || TREE_CODE_CLASS (code) == tcc_comparison)
9395 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9397 tem = fold_build2_loc (loc, code, type,
9398 fold_convert_loc (loc, TREE_TYPE (op0),
9399 TREE_OPERAND (arg0, 1)), op1);
9400 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9403 if (TREE_CODE (arg1) == COMPOUND_EXPR
9404 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9406 tem = fold_build2_loc (loc, code, type, op0,
9407 fold_convert_loc (loc, TREE_TYPE (op1),
9408 TREE_OPERAND (arg1, 1)));
9409 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9413 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9415 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9417 /*cond_first_p=*/1);
9418 if (tem != NULL_TREE)
9422 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9424 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9426 /*cond_first_p=*/0);
9427 if (tem != NULL_TREE)
9435 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9436 if (TREE_CODE (arg0) == ADDR_EXPR
9437 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9439 tree iref = TREE_OPERAND (arg0, 0);
9440 return fold_build2 (MEM_REF, type,
9441 TREE_OPERAND (iref, 0),
9442 int_const_binop (PLUS_EXPR, arg1,
9443 TREE_OPERAND (iref, 1)));
9446 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9447 if (TREE_CODE (arg0) == ADDR_EXPR
9448 && handled_component_p (TREE_OPERAND (arg0, 0)))
9451 HOST_WIDE_INT coffset;
9452 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9456 return fold_build2 (MEM_REF, type,
9457 build_fold_addr_expr (base),
9458 int_const_binop (PLUS_EXPR, arg1,
9459 size_int (coffset)));
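/* Example (illustrative, not from the original source): for

     struct s { int a; int b; } x;

   the access MEM[&x.b, 4] is rewritten as MEM[&x, 8] on a target
   where b sits at byte offset 4, i.e. the component's offset is
   folded into the MEM_REF's constant offset.  */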
9464 case POINTER_PLUS_EXPR:
9465 /* 0 +p index -> (type)index */
9466 if (integer_zerop (arg0))
9467 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9469 /* PTR +p 0 -> PTR */
9470 if (integer_zerop (arg1))
9471 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9473 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9474 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9475 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9476 return fold_convert_loc (loc, type,
9477 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9478 fold_convert_loc (loc, sizetype,
9480 fold_convert_loc (loc, sizetype,
9483 /* index +p PTR -> PTR +p index */
9484 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9485 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9486 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9487 fold_convert_loc (loc, type, arg1),
9488 fold_convert_loc (loc, sizetype, arg0));
9490 /* (PTR +p B) +p A -> PTR +p (B + A) */
9491 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9494 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9495 tree arg00 = TREE_OPERAND (arg0, 0);
9496 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9497 arg01, fold_convert_loc (loc, sizetype, arg1));
9498 return fold_convert_loc (loc, type,
9499 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9504 /* PTR_CST +p CST -> CST1 */
9505 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9506 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9507 fold_convert_loc (loc, type, arg1));
9509 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9510 of the array. The loop optimizer sometimes produces this type of expression. */
9512 if (TREE_CODE (arg0) == ADDR_EXPR)
9514 tem = try_move_mult_to_index (loc, arg0,
9515 fold_convert_loc (loc, sizetype, arg1));
9517 return fold_convert_loc (loc, type, tem);
9523 /* A + (-B) -> A - B */
9524 if (TREE_CODE (arg1) == NEGATE_EXPR)
9525 return fold_build2_loc (loc, MINUS_EXPR, type,
9526 fold_convert_loc (loc, type, arg0),
9527 fold_convert_loc (loc, type,
9528 TREE_OPERAND (arg1, 0)));
9529 /* (-A) + B -> B - A */
9530 if (TREE_CODE (arg0) == NEGATE_EXPR
9531 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9532 return fold_build2_loc (loc, MINUS_EXPR, type,
9533 fold_convert_loc (loc, type, arg1),
9534 fold_convert_loc (loc, type,
9535 TREE_OPERAND (arg0, 0)));
9537 if (INTEGRAL_TYPE_P (type))
9539 /* Convert ~A + 1 to -A. */
9540 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9541 && integer_onep (arg1))
9542 return fold_build1_loc (loc, NEGATE_EXPR, type,
9543 fold_convert_loc (loc, type,
9544 TREE_OPERAND (arg0, 0)));
9547 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9548 && !TYPE_OVERFLOW_TRAPS (type))
9550 tree tem = TREE_OPERAND (arg0, 0);
9553 if (operand_equal_p (tem, arg1, 0))
9555 t1 = build_int_cst_type (type, -1);
9556 return omit_one_operand_loc (loc, type, t1, arg1);
9561 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9562 && !TYPE_OVERFLOW_TRAPS (type))
9564 tree tem = TREE_OPERAND (arg1, 0);
9567 if (operand_equal_p (arg0, tem, 0))
9569 t1 = build_int_cst_type (type, -1);
9570 return omit_one_operand_loc (loc, type, t1, arg0);
9574 /* X + (X / CST) * -CST is X % CST. */
9575 if (TREE_CODE (arg1) == MULT_EXPR
9576 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9577 && operand_equal_p (arg0,
9578 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9580 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9581 tree cst1 = TREE_OPERAND (arg1, 1);
9582 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9584 if (sum && integer_zerop (sum))
9585 return fold_convert_loc (loc, type,
9586 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9587 TREE_TYPE (arg0), arg0,
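/* Example (hedged, not part of the original source): with integer x,

     x + (x / 16) * -16    folds to    x % 16

   since the multiplier is the exact negation of the divisor, i.e.
   their sum folds to zero.  */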
9592 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9593 same or one. Make sure type is not saturating.
9594 fold_plusminus_mult_expr will re-associate. */
9595 if ((TREE_CODE (arg0) == MULT_EXPR
9596 || TREE_CODE (arg1) == MULT_EXPR)
9597 && !TYPE_SATURATING (type)
9598 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9600 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9605 if (! FLOAT_TYPE_P (type))
9607 if (integer_zerop (arg1))
9608 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9610 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9611 with a constant, and the two constants have no bits in common,
9612 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications. */
9614 if (TREE_CODE (arg0) == BIT_AND_EXPR
9615 && TREE_CODE (arg1) == BIT_AND_EXPR
9616 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9617 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9618 && integer_zerop (const_binop (BIT_AND_EXPR,
9619 TREE_OPERAND (arg0, 1),
9620 TREE_OPERAND (arg1, 1))))
9622 code = BIT_IOR_EXPR;
9626 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9627 (plus (plus (mult) (mult)) (foo)) so that we can
9628 take advantage of the factoring cases below. */
9629 if (((TREE_CODE (arg0) == PLUS_EXPR
9630 || TREE_CODE (arg0) == MINUS_EXPR)
9631 && TREE_CODE (arg1) == MULT_EXPR)
9632 || ((TREE_CODE (arg1) == PLUS_EXPR
9633 || TREE_CODE (arg1) == MINUS_EXPR)
9634 && TREE_CODE (arg0) == MULT_EXPR))
9636 tree parg0, parg1, parg, marg;
9637 enum tree_code pcode;
9639 if (TREE_CODE (arg1) == MULT_EXPR)
9640 parg = arg0, marg = arg1;
9642 parg = arg1, marg = arg0;
9643 pcode = TREE_CODE (parg);
9644 parg0 = TREE_OPERAND (parg, 0);
9645 parg1 = TREE_OPERAND (parg, 1);
9649 if (TREE_CODE (parg0) == MULT_EXPR
9650 && TREE_CODE (parg1) != MULT_EXPR)
9651 return fold_build2_loc (loc, pcode, type,
9652 fold_build2_loc (loc, PLUS_EXPR, type,
9653 fold_convert_loc (loc, type,
9655 fold_convert_loc (loc, type,
9657 fold_convert_loc (loc, type, parg1));
9658 if (TREE_CODE (parg0) != MULT_EXPR
9659 && TREE_CODE (parg1) == MULT_EXPR)
9661 fold_build2_loc (loc, PLUS_EXPR, type,
9662 fold_convert_loc (loc, type, parg0),
9663 fold_build2_loc (loc, pcode, type,
9664 fold_convert_loc (loc, type, marg),
9665 fold_convert_loc (loc, type,
9671 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9672 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9673 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9675 /* Likewise if the operands are reversed. */
9676 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9677 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9679 /* Convert X + -C into X - C. */
9680 if (TREE_CODE (arg1) == REAL_CST
9681 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9683 tem = fold_negate_const (arg1, type);
9684 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9685 return fold_build2_loc (loc, MINUS_EXPR, type,
9686 fold_convert_loc (loc, type, arg0),
9687 fold_convert_loc (loc, type, tem));
9690 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9691 to __complex__ ( x, y ). This is not the same for SNaNs or
9692 if signed zeros are involved. */
9693 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9694 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9695 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9697 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9698 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9699 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9700 bool arg0rz = false, arg0iz = false;
9701 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9702 || (arg0i && (arg0iz = real_zerop (arg0i))))
9704 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9705 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9706 if (arg0rz && arg1i && real_zerop (arg1i))
9708 tree rp = arg1r ? arg1r
9709 : build1 (REALPART_EXPR, rtype, arg1);
9710 tree ip = arg0i ? arg0i
9711 : build1 (IMAGPART_EXPR, rtype, arg0);
9712 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9714 else if (arg0iz && arg1r && real_zerop (arg1r))
9716 tree rp = arg0r ? arg0r
9717 : build1 (REALPART_EXPR, rtype, arg0);
9718 tree ip = arg1i ? arg1i
9719 : build1 (IMAGPART_EXPR, rtype, arg1);
9720 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
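/* Example (hedged sketch, not part of the original source): for
   _Complex double,

     (x + 0.0i) + (0.0 + y*1.0i)    folds to    x + y*1.0i

   i.e. a single COMPLEX_EXPR (x, y), valid only because SNaNs and
   signed zeros are not honored here (otherwise the 0.0 + (-0.0)
   additions in each part would be observable).  */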
9725 if (flag_unsafe_math_optimizations
9726 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9727 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9728 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9731 /* Convert x+x into x*2.0. */
9732 if (operand_equal_p (arg0, arg1, 0)
9733 && SCALAR_FLOAT_TYPE_P (type))
9734 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9735 build_real (type, dconst2));
9737 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9738 We associate floats only if the user has specified
9739 -fassociative-math. */
9740 if (flag_associative_math
9741 && TREE_CODE (arg1) == PLUS_EXPR
9742 && TREE_CODE (arg0) != MULT_EXPR)
9744 tree tree10 = TREE_OPERAND (arg1, 0);
9745 tree tree11 = TREE_OPERAND (arg1, 1);
9746 if (TREE_CODE (tree11) == MULT_EXPR
9747 && TREE_CODE (tree10) == MULT_EXPR)
9750 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9751 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9754 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9755 We associate floats only if the user has specified
9756 -fassociative-math. */
9757 if (flag_associative_math
9758 && TREE_CODE (arg0) == PLUS_EXPR
9759 && TREE_CODE (arg1) != MULT_EXPR)
9761 tree tree00 = TREE_OPERAND (arg0, 0);
9762 tree tree01 = TREE_OPERAND (arg0, 1);
9763 if (TREE_CODE (tree01) == MULT_EXPR
9764 && TREE_CODE (tree00) == MULT_EXPR)
9767 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9768 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9774 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9775 is a rotate of A by C1 bits. */
9776 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9777 is a rotate of A by B bits. */
9779 enum tree_code code0, code1;
9781 code0 = TREE_CODE (arg0);
9782 code1 = TREE_CODE (arg1);
9783 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9784 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9785 && operand_equal_p (TREE_OPERAND (arg0, 0),
9786 TREE_OPERAND (arg1, 0), 0)
9787 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9788 TYPE_UNSIGNED (rtype))
9789 /* Only create rotates in complete modes. Other cases are not
9790 expanded properly. */
9791 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9793 tree tree01, tree11;
9794 enum tree_code code01, code11;
9796 tree01 = TREE_OPERAND (arg0, 1);
9797 tree11 = TREE_OPERAND (arg1, 1);
9798 STRIP_NOPS (tree01);
9799 STRIP_NOPS (tree11);
9800 code01 = TREE_CODE (tree01);
9801 code11 = TREE_CODE (tree11);
9802 if (code01 == INTEGER_CST
9803 && code11 == INTEGER_CST
9804 && TREE_INT_CST_HIGH (tree01) == 0
9805 && TREE_INT_CST_HIGH (tree11) == 0
9806 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9807 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9809 tem = build2_loc (loc, LROTATE_EXPR,
9810 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9811 TREE_OPERAND (arg0, 0),
9812 code0 == LSHIFT_EXPR ? tree01 : tree11);
9813 return fold_convert_loc (loc, type, tem);
9815 else if (code11 == MINUS_EXPR)
9817 tree tree110, tree111;
9818 tree110 = TREE_OPERAND (tree11, 0);
9819 tree111 = TREE_OPERAND (tree11, 1);
9820 STRIP_NOPS (tree110);
9821 STRIP_NOPS (tree111);
9822 if (TREE_CODE (tree110) == INTEGER_CST
9823 && 0 == compare_tree_int (tree110,
9825 (TREE_TYPE (TREE_OPERAND
9827 && operand_equal_p (tree01, tree111, 0))
9829 fold_convert_loc (loc, type,
9830 build2 ((code0 == LSHIFT_EXPR
9833 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9834 TREE_OPERAND (arg0, 0), tree01));
9836 else if (code01 == MINUS_EXPR)
9838 tree tree010, tree011;
9839 tree010 = TREE_OPERAND (tree01, 0);
9840 tree011 = TREE_OPERAND (tree01, 1);
9841 STRIP_NOPS (tree010);
9842 STRIP_NOPS (tree011);
9843 if (TREE_CODE (tree010) == INTEGER_CST
9844 && 0 == compare_tree_int (tree010,
9846 (TREE_TYPE (TREE_OPERAND
9848 && operand_equal_p (tree11, tree011, 0))
9849 return fold_convert_loc
9851 build2 ((code0 != LSHIFT_EXPR
9854 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9855 TREE_OPERAND (arg0, 0), tree11));
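/* Example (illustrative, not from the original source): with
   unsigned int x on a 32-bit target,

     (x << 3) + (x >> 29)          is recognized as a rotate left by 3
     (x << n) + (x >> (32 - n))    is recognized as a rotate by n

   both map to a single rotate expression (LROTATE_EXPR/RROTATE_EXPR),
   which most targets expand to one rotate instruction.  */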
9861 /* In most languages, we can't associate operations on floats through
9862 parentheses. Rather than remember where the parentheses were, we
9863 don't associate floats at all, unless the user has specified -fassociative-math.
9865 And, we need to make sure type is not saturating. */
9867 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9868 && !TYPE_SATURATING (type))
9870 tree var0, con0, lit0, minus_lit0;
9871 tree var1, con1, lit1, minus_lit1;
9874 /* Split both trees into variables, constants, and literals. Then
9875 associate each group together, the constants with literals,
9876 then the result with variables. This increases the chances of
9877 literals being recombined later and of generating relocatable
9878 expressions for the sum of a constant and literal. */
9879 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9880 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9881 code == MINUS_EXPR);
9883 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9884 if (code == MINUS_EXPR)
9887 /* With undefined overflow we can only associate constants with one
9888 variable, and constants whose association doesn't overflow. */
9889 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9890 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9897 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9898 tmp0 = TREE_OPERAND (tmp0, 0);
9899 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9900 tmp1 = TREE_OPERAND (tmp1, 0);
9901 /* The only case we can still associate with two variables
9902 is if they are the same, modulo negation. */
9903 if (!operand_equal_p (tmp0, tmp1, 0))
9907 if (ok && lit0 && lit1)
9909 tree tmp0 = fold_convert (type, lit0);
9910 tree tmp1 = fold_convert (type, lit1);
9912 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
9913 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
9918 /* Only do something if we found more than two objects. Otherwise,
9919 nothing has changed and we risk infinite recursion. */
9921 && (2 < ((var0 != 0) + (var1 != 0)
9922 + (con0 != 0) + (con1 != 0)
9923 + (lit0 != 0) + (lit1 != 0)
9924 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9926 var0 = associate_trees (loc, var0, var1, code, type);
9927 con0 = associate_trees (loc, con0, con1, code, type);
9928 lit0 = associate_trees (loc, lit0, lit1, code, type);
9929 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
9931 /* Preserve the MINUS_EXPR if the negative part of the literal is
9932 greater than the positive part. Otherwise, the multiplicative
9933 folding code (i.e. extract_muldiv) may be fooled in case
9934 unsigned constants are subtracted, like in the following
9935 example: ((X*2 + 4) - 8U)/2. */
9936 if (minus_lit0 && lit0)
9938 if (TREE_CODE (lit0) == INTEGER_CST
9939 && TREE_CODE (minus_lit0) == INTEGER_CST
9940 && tree_int_cst_lt (lit0, minus_lit0))
9942 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9948 lit0 = associate_trees (loc, lit0, minus_lit0,
9957 fold_convert_loc (loc, type,
9958 associate_trees (loc, var0, minus_lit0,
9962 con0 = associate_trees (loc, con0, minus_lit0,
9965 fold_convert_loc (loc, type,
9966 associate_trees (loc, var0, con0,
9971 con0 = associate_trees (loc, con0, lit0, code, type);
9973 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9981 /* Pointer simplifications for subtraction, simple reassociations. */
9982 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9984 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9985 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9986 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9988 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9989 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9990 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
9991 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
9992 return fold_build2_loc (loc, PLUS_EXPR, type,
9993 fold_build2_loc (loc, MINUS_EXPR, type,
9995 fold_build2_loc (loc, MINUS_EXPR, type,
9998 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9999 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10001 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10002 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10003 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10004 fold_convert_loc (loc, type, arg1));
10006 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10009 /* A - (-B) -> A + B */
10010 if (TREE_CODE (arg1) == NEGATE_EXPR)
10011 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10012 fold_convert_loc (loc, type,
10013 TREE_OPERAND (arg1, 0)));
10014 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10015 if (TREE_CODE (arg0) == NEGATE_EXPR
10016 && (FLOAT_TYPE_P (type)
10017 || INTEGRAL_TYPE_P (type))
10018 && negate_expr_p (arg1)
10019 && reorder_operands_p (arg0, arg1))
10020 return fold_build2_loc (loc, MINUS_EXPR, type,
10021 fold_convert_loc (loc, type,
10022 negate_expr (arg1)),
10023 fold_convert_loc (loc, type,
10024 TREE_OPERAND (arg0, 0)));
10025 /* Convert -A - 1 to ~A. */
10026 if (INTEGRAL_TYPE_P (type)
10027 && TREE_CODE (arg0) == NEGATE_EXPR
10028 && integer_onep (arg1)
10029 && !TYPE_OVERFLOW_TRAPS (type))
10030 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10031 fold_convert_loc (loc, type,
10032 TREE_OPERAND (arg0, 0)));
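/* [Editorial illustration, not in the original source: in two's
   complement ~A == -A - 1, e.g. A == 5 gives -5 - 1 == -6 == ~5;
   the TYPE_OVERFLOW_TRAPS test above keeps this away from types
   where negation may trap.]  */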
10034 /* Convert -1 - A to ~A. */
10035 if (INTEGRAL_TYPE_P (type)
10036 && integer_all_onesp (arg0))
10037 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10040 /* X - (X / CST) * CST is X % CST. */
10041 if (INTEGRAL_TYPE_P (type)
10042 && TREE_CODE (arg1) == MULT_EXPR
10043 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10044 && operand_equal_p (arg0,
10045 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10046 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10047 TREE_OPERAND (arg1, 1), 0))
10049 fold_convert_loc (loc, type,
10050 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10051 arg0, TREE_OPERAND (arg1, 1)));
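/* [Editorial illustration, not in the original source: with X == 13
   and CST == 4, 13 - (13/4)*4 == 13 - 12 == 1 == 13 % 4 under
   truncating division.]  */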
10053 if (! FLOAT_TYPE_P (type))
10055 if (integer_zerop (arg0))
10056 return negate_expr (fold_convert_loc (loc, type, arg1));
10057 if (integer_zerop (arg1))
10058 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10060 /* Fold A - (A & B) into ~B & A. */
10061 if (!TREE_SIDE_EFFECTS (arg0)
10062 && TREE_CODE (arg1) == BIT_AND_EXPR)
10064 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10066 tree arg10 = fold_convert_loc (loc, type,
10067 TREE_OPERAND (arg1, 0));
10068 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10069 fold_build1_loc (loc, BIT_NOT_EXPR,
10071 fold_convert_loc (loc, type, arg0));
10073 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10075 tree arg11 = fold_convert_loc (loc,
10076 type, TREE_OPERAND (arg1, 1));
10077 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10078 fold_build1_loc (loc, BIT_NOT_EXPR,
10080 fold_convert_loc (loc, type, arg0));
10084 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10085 any power of 2 minus 1. */
10086 if (TREE_CODE (arg0) == BIT_AND_EXPR
10087 && TREE_CODE (arg1) == BIT_AND_EXPR
10088 && operand_equal_p (TREE_OPERAND (arg0, 0),
10089 TREE_OPERAND (arg1, 0), 0))
10091 tree mask0 = TREE_OPERAND (arg0, 1);
10092 tree mask1 = TREE_OPERAND (arg1, 1);
10093 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10095 if (operand_equal_p (tem, mask1, 0))
10097 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10098 TREE_OPERAND (arg0, 0), mask1);
10099 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
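/* [Editorial illustration, not in the original source: A == 6,
   B == 3 (a power of 2 minus 1): (6 & ~3) - (6 & 3) == 4 - 2 == 2,
   and (6 ^ 3) - 3 == 5 - 3 == 2 as well.]  */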
10104 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10105 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10106 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10108 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10109 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10110 (-ARG1 + ARG0) reduces to -ARG1. */
10111 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10112 return negate_expr (fold_convert_loc (loc, type, arg1));
10114 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10115 __complex__ ( x, -y ). This is not the same for SNaNs or if
10116 signed zeros are involved. */
10117 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10118 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10119 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10121 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10122 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10123 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10124 bool arg0rz = false, arg0iz = false;
10125 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10126 || (arg0i && (arg0iz = real_zerop (arg0i))))
10128 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10129 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10130 if (arg0rz && arg1i && real_zerop (arg1i))
10132 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10134 : build1 (REALPART_EXPR, rtype, arg1));
10135 tree ip = arg0i ? arg0i
10136 : build1 (IMAGPART_EXPR, rtype, arg0);
10137 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10139 else if (arg0iz && arg1r && real_zerop (arg1r))
10141 tree rp = arg0r ? arg0r
10142 : build1 (REALPART_EXPR, rtype, arg0);
10143 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10145 : build1 (IMAGPART_EXPR, rtype, arg1));
10146 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10151 /* Fold &x - &x. This can happen from &x.foo - &x.
10152 This is unsafe for certain floats even in non-IEEE formats.
10153 In IEEE, it is unsafe because it does wrong for NaNs.
10154 Also note that operand_equal_p is always false if an operand is volatile. */
10157 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10158 && operand_equal_p (arg0, arg1, 0))
10159 return build_zero_cst (type);
10161 /* A - B -> A + (-B) if B is easily negatable. */
10162 if (negate_expr_p (arg1)
10163 && ((FLOAT_TYPE_P (type)
10164 /* Avoid this transformation if B is a positive REAL_CST. */
10165 && (TREE_CODE (arg1) != REAL_CST
10166 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10167 || INTEGRAL_TYPE_P (type)))
10168 return fold_build2_loc (loc, PLUS_EXPR, type,
10169 fold_convert_loc (loc, type, arg0),
10170 fold_convert_loc (loc, type,
10171 negate_expr (arg1)));
10173 /* Try folding difference of addresses. */
10175 HOST_WIDE_INT diff;
10177 if ((TREE_CODE (arg0) == ADDR_EXPR
10178 || TREE_CODE (arg1) == ADDR_EXPR)
10179 && ptr_difference_const (arg0, arg1, &diff))
10180 return build_int_cst_type (type, diff);
10183 /* Fold &a[i] - &a[j] to i-j. */
10184 if (TREE_CODE (arg0) == ADDR_EXPR
10185 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10186 && TREE_CODE (arg1) == ADDR_EXPR
10187 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10189 tree aref0 = TREE_OPERAND (arg0, 0);
10190 tree aref1 = TREE_OPERAND (arg1, 0);
10191 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10192 TREE_OPERAND (aref1, 0), 0))
10194 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10195 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10196 tree esz = array_ref_element_size (aref0);
10197 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10198 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10199 fold_convert_loc (loc, type, esz));
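/* [Editorial illustration, not in the original source: for an array
   of 4-byte ints, &a[5] - &a[2] folds here to (5 - 2) * 4 == 12,
   the difference in bytes at this level of the IL.]  */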
10204 if (FLOAT_TYPE_P (type)
10205 && flag_unsafe_math_optimizations
10206 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10207 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10208 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10211 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10212 same or one. Make sure type is not saturating.
10213 fold_plusminus_mult_expr will re-associate. */
10214 if ((TREE_CODE (arg0) == MULT_EXPR
10215 || TREE_CODE (arg1) == MULT_EXPR)
10216 && !TYPE_SATURATING (type)
10217 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10219 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10227 /* (-A) * (-B) -> A * B */
10228 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10229 return fold_build2_loc (loc, MULT_EXPR, type,
10230 fold_convert_loc (loc, type,
10231 TREE_OPERAND (arg0, 0)),
10232 fold_convert_loc (loc, type,
10233 negate_expr (arg1)));
10234 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10235 return fold_build2_loc (loc, MULT_EXPR, type,
10236 fold_convert_loc (loc, type,
10237 negate_expr (arg0)),
10238 fold_convert_loc (loc, type,
10239 TREE_OPERAND (arg1, 0)));
10241 if (! FLOAT_TYPE_P (type))
10243 if (integer_zerop (arg1))
10244 return omit_one_operand_loc (loc, type, arg1, arg0);
10245 if (integer_onep (arg1))
10246 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10247 /* Transform x * -1 into -x. Make sure to do the negation
10248 on the original operand with conversions not stripped
10249 because we can only strip non-sign-changing conversions. */
10250 if (integer_all_onesp (arg1))
10251 return fold_convert_loc (loc, type, negate_expr (op0));
10252 /* Transform x * -C into -x * C if x is easily negatable. */
10253 if (TREE_CODE (arg1) == INTEGER_CST
10254 && tree_int_cst_sgn (arg1) == -1
10255 && negate_expr_p (arg0)
10256 && (tem = negate_expr (arg1)) != arg1
10257 && !TREE_OVERFLOW (tem))
10258 return fold_build2_loc (loc, MULT_EXPR, type,
10259 fold_convert_loc (loc, type,
10260 negate_expr (arg0)),
10263 /* (a * (1 << b)) is (a << b) */
10264 if (TREE_CODE (arg1) == LSHIFT_EXPR
10265 && integer_onep (TREE_OPERAND (arg1, 0)))
10266 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10267 TREE_OPERAND (arg1, 1));
10268 if (TREE_CODE (arg0) == LSHIFT_EXPR
10269 && integer_onep (TREE_OPERAND (arg0, 0)))
10270 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10271 TREE_OPERAND (arg0, 1));
10273 /* (A + A) * C -> A * 2 * C */
10274 if (TREE_CODE (arg0) == PLUS_EXPR
10275 && TREE_CODE (arg1) == INTEGER_CST
10276 && operand_equal_p (TREE_OPERAND (arg0, 0),
10277 TREE_OPERAND (arg0, 1), 0))
10278 return fold_build2_loc (loc, MULT_EXPR, type,
10279 omit_one_operand_loc (loc, type,
10280 TREE_OPERAND (arg0, 0),
10281 TREE_OPERAND (arg0, 1)),
10282 fold_build2_loc (loc, MULT_EXPR, type,
10283 build_int_cst (type, 2), arg1));
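/* [Editorial illustration, not in the original source: with A == 5
   and C == 3, (5 + 5) * 3 == 30 == 5 * (2 * 3).]  */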
10285 strict_overflow_p = false;
10286 if (TREE_CODE (arg1) == INTEGER_CST
10287 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10288 &strict_overflow_p)))
10290 if (strict_overflow_p)
10291 fold_overflow_warning (("assuming signed overflow does not "
10292 "occur when simplifying "
10294 WARN_STRICT_OVERFLOW_MISC);
10295 return fold_convert_loc (loc, type, tem);
10298 /* Optimize z * conj(z) for integer complex numbers. */
10299 if (TREE_CODE (arg0) == CONJ_EXPR
10300 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10301 return fold_mult_zconjz (loc, type, arg1);
10302 if (TREE_CODE (arg1) == CONJ_EXPR
10303 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10304 return fold_mult_zconjz (loc, type, arg0);
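/* [Editorial illustration, not in the original source: for
   z == a + b*i, z * conj(z) == a*a + b*b, e.g.
   (3 + 4i) * (3 - 4i) == 9 + 16 == 25.]  */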
10308 /* Maybe fold x * 0 to 0. The expressions aren't the same
10309 when x is NaN, since x * 0 is also NaN. Nor are they the
10310 same in modes with signed zeros, since multiplying a
10311 negative value by 0 gives -0, not +0. */
10312 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10313 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10314 && real_zerop (arg1))
10315 return omit_one_operand_loc (loc, type, arg1, arg0);
10316 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10317 Likewise for complex arithmetic with signed zeros. */
10318 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10319 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10320 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10321 && real_onep (arg1))
10322 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10324 /* Transform x * -1.0 into -x. */
10325 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10326 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10327 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10328 && real_minus_onep (arg1))
10329 return fold_convert_loc (loc, type, negate_expr (arg0));
10331 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10332 the result for floating point types due to rounding so it is applied
10333 only if -fassociative-math was specified. */
10334 if (flag_associative_math
10335 && TREE_CODE (arg0) == RDIV_EXPR
10336 && TREE_CODE (arg1) == REAL_CST
10337 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10339 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10342 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10343 TREE_OPERAND (arg0, 1));
10346 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10347 if (operand_equal_p (arg0, arg1, 0))
10349 tree tem = fold_strip_sign_ops (arg0);
10350 if (tem != NULL_TREE)
10352 tem = fold_convert_loc (loc, type, tem);
10353 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10357 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10358 This is not the same for NaNs or if signed zeros are involved. */
10360 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10361 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10362 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10363 && TREE_CODE (arg1) == COMPLEX_CST
10364 && real_zerop (TREE_REALPART (arg1)))
10366 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10367 if (real_onep (TREE_IMAGPART (arg1)))
10369 fold_build2_loc (loc, COMPLEX_EXPR, type,
10370 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10372 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10373 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10375 fold_build2_loc (loc, COMPLEX_EXPR, type,
10376 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10377 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10381 /* Optimize z * conj(z) for floating point complex numbers.
10382 Guarded by flag_unsafe_math_optimizations as non-finite
10383 imaginary components don't produce scalar results. */
10384 if (flag_unsafe_math_optimizations
10385 && TREE_CODE (arg0) == CONJ_EXPR
10386 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10387 return fold_mult_zconjz (loc, type, arg1);
10388 if (flag_unsafe_math_optimizations
10389 && TREE_CODE (arg1) == CONJ_EXPR
10390 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10391 return fold_mult_zconjz (loc, type, arg0);
10393 if (flag_unsafe_math_optimizations)
10395 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10396 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10398 /* Optimizations of root(...)*root(...). */
10399 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10402 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10403 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10405 /* Optimize sqrt(x)*sqrt(x) as x. */
10406 if (BUILTIN_SQRT_P (fcode0)
10407 && operand_equal_p (arg00, arg10, 0)
10408 && ! HONOR_SNANS (TYPE_MODE (type)))
10411 /* Optimize root(x)*root(y) as root(x*y). */
10412 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10413 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10414 return build_call_expr_loc (loc, rootfn, 1, arg);
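/* [Editorial illustration, not in the original source:
   sqrt(4.0) * sqrt(9.0) == 2.0 * 3.0 == 6.0 == sqrt(36.0); the
   flag_unsafe_math_optimizations guard exists because the identity
   can change results for negative or special operands.]  */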
10417 /* Optimize expN(x)*expN(y) as expN(x+y). */
10418 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10420 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10421 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10422 CALL_EXPR_ARG (arg0, 0),
10423 CALL_EXPR_ARG (arg1, 0));
10424 return build_call_expr_loc (loc, expfn, 1, arg);
10427 /* Optimizations of pow(...)*pow(...). */
10428 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10429 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10430 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10432 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10433 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10434 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10435 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10437 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10438 if (operand_equal_p (arg01, arg11, 0))
10440 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10441 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10443 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10446 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10447 if (operand_equal_p (arg00, arg10, 0))
10449 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10450 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10452 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
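/* [Editorial illustration, not in the original source:
   pow(2.0, 3.0) * pow(2.0, 4.0) == 8.0 * 16.0 == 128.0
   == pow(2.0, 7.0).]  */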
10456 /* Optimize tan(x)*cos(x) as sin(x). */
10457 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10458 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10459 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10460 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10461 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10462 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10463 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10464 CALL_EXPR_ARG (arg1, 0), 0))
10466 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10468 if (sinfn != NULL_TREE)
10469 return build_call_expr_loc (loc, sinfn, 1,
10470 CALL_EXPR_ARG (arg0, 0));
10473 /* Optimize x*pow(x,c) as pow(x,c+1). */
10474 if (fcode1 == BUILT_IN_POW
10475 || fcode1 == BUILT_IN_POWF
10476 || fcode1 == BUILT_IN_POWL)
10478 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10479 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10480 if (TREE_CODE (arg11) == REAL_CST
10481 && !TREE_OVERFLOW (arg11)
10482 && operand_equal_p (arg0, arg10, 0))
10484 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10488 c = TREE_REAL_CST (arg11);
10489 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10490 arg = build_real (type, c);
10491 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10495 /* Optimize pow(x,c)*x as pow(x,c+1). */
10496 if (fcode0 == BUILT_IN_POW
10497 || fcode0 == BUILT_IN_POWF
10498 || fcode0 == BUILT_IN_POWL)
10500 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10501 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10502 if (TREE_CODE (arg01) == REAL_CST
10503 && !TREE_OVERFLOW (arg01)
10504 && operand_equal_p (arg1, arg00, 0))
10506 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10510 c = TREE_REAL_CST (arg01);
10511 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10512 arg = build_real (type, c);
10513 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10517 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10518 if (optimize_function_for_speed_p (cfun)
10519 && operand_equal_p (arg0, arg1, 0))
10521 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10525 tree arg = build_real (type, dconst2);
10526 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10535 if (integer_all_onesp (arg1))
10536 return omit_one_operand_loc (loc, type, arg1, arg0);
10537 if (integer_zerop (arg1))
10538 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10539 if (operand_equal_p (arg0, arg1, 0))
10540 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10542 /* ~X | X is -1. */
10543 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10544 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10546 t1 = build_zero_cst (type);
10547 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10548 return omit_one_operand_loc (loc, type, t1, arg1);
10551 /* X | ~X is -1. */
10552 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10553 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10555 t1 = build_zero_cst (type);
10556 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10557 return omit_one_operand_loc (loc, type, t1, arg0);
10560 /* Canonicalize (X & C1) | C2. */
10561 if (TREE_CODE (arg0) == BIT_AND_EXPR
10562 && TREE_CODE (arg1) == INTEGER_CST
10563 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10565 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10566 int width = TYPE_PRECISION (type), w;
10567 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10568 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10569 hi2 = TREE_INT_CST_HIGH (arg1);
10570 lo2 = TREE_INT_CST_LOW (arg1);
10572 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10573 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10574 return omit_one_operand_loc (loc, type, arg1,
10575 TREE_OPERAND (arg0, 0));
10577 if (width > HOST_BITS_PER_WIDE_INT)
10579 mhi = (unsigned HOST_WIDE_INT) -1
10580 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10586 mlo = (unsigned HOST_WIDE_INT) -1
10587 >> (HOST_BITS_PER_WIDE_INT - width);
10590 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10591 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10592 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10593 TREE_OPERAND (arg0, 0), arg1);
10595 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10596 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10597 mode which allows further optimizations. */
10604 for (w = BITS_PER_UNIT;
10605 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10608 unsigned HOST_WIDE_INT mask
10609 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10610 if (((lo1 | lo2) & mask) == mask
10611 && (lo1 & ~mask) == 0 && hi1 == 0)
10618 if (hi3 != hi1 || lo3 != lo1)
10619 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10620 fold_build2_loc (loc, BIT_AND_EXPR, type,
10621 TREE_OPERAND (arg0, 0),
10622 build_int_cst_wide (type,
10627 /* (X & Y) | Y is (X, Y). */
10628 if (TREE_CODE (arg0) == BIT_AND_EXPR
10629 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10630 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10631 /* (X & Y) | X is (Y, X). */
10632 if (TREE_CODE (arg0) == BIT_AND_EXPR
10633 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10634 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10635 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10636 /* X | (X & Y) is (Y, X). */
10637 if (TREE_CODE (arg1) == BIT_AND_EXPR
10638 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10639 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10640 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10641 /* X | (Y & X) is (Y, X). */
10642 if (TREE_CODE (arg1) == BIT_AND_EXPR
10643 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10644 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10645 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10647 /* (X & ~Y) | (~X & Y) is X ^ Y */
10648 if (TREE_CODE (arg0) == BIT_AND_EXPR
10649 && TREE_CODE (arg1) == BIT_AND_EXPR)
10651 tree a0, a1, l0, l1, n0, n1;
10653 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10654 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10656 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10657 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10659 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10660 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10662 if ((operand_equal_p (n0, a0, 0)
10663 && operand_equal_p (n1, a1, 0))
10664 || (operand_equal_p (n0, a1, 0)
10665 && operand_equal_p (n1, a0, 0)))
10666 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
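/* [Editorial illustration, not in the original source: X == 0xC,
   Y == 0xA: (X & ~Y) | (~X & Y) == 0x4 | 0x2 == 0x6 == X ^ Y.]  */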
10669 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10670 if (t1 != NULL_TREE)
10673 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10675 This results in more efficient code for machines without a NAND
10676 instruction. Combine will canonicalize to the first form
10677 which will allow use of NAND instructions provided by the
10678 backend if they exist. */
10679 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10680 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10683 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10684 build2 (BIT_AND_EXPR, type,
10685 fold_convert_loc (loc, type,
10686 TREE_OPERAND (arg0, 0)),
10687 fold_convert_loc (loc, type,
10688 TREE_OPERAND (arg1, 0))));
10691 /* See if this can be simplified into a rotate first. If that
10692 is unsuccessful, continue in the association code. */
10696 if (integer_zerop (arg1))
10697 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10698 if (integer_all_onesp (arg1))
10699 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10700 if (operand_equal_p (arg0, arg1, 0))
10701 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10703 /* ~X ^ X is -1. */
10704 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10705 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10707 t1 = build_zero_cst (type);
10708 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10709 return omit_one_operand_loc (loc, type, t1, arg1);
10712 /* X ^ ~X is -1. */
10713 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10714 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10716 t1 = build_zero_cst (type);
10717 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10718 return omit_one_operand_loc (loc, type, t1, arg0);
10721 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10722 with a constant, and the two constants have no bits in common,
10723 we should treat this as a BIT_IOR_EXPR since this may produce more
10724 simplifications. */
10725 if (TREE_CODE (arg0) == BIT_AND_EXPR
10726 && TREE_CODE (arg1) == BIT_AND_EXPR
10727 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10728 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10729 && integer_zerop (const_binop (BIT_AND_EXPR,
10730 TREE_OPERAND (arg0, 1),
10731 TREE_OPERAND (arg1, 1))))
10733 code = BIT_IOR_EXPR;
10737 /* (X | Y) ^ X -> Y & ~X */
10738 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10739 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10741 tree t2 = TREE_OPERAND (arg0, 1);
10742 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10744 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10745 fold_convert_loc (loc, type, t2),
10746 fold_convert_loc (loc, type, t1));
10750 /* (Y | X) ^ X -> Y & ~X */
10751 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10752 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10754 tree t2 = TREE_OPERAND (arg0, 0);
10755 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10757 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10758 fold_convert_loc (loc, type, t2),
10759 fold_convert_loc (loc, type, t1));
10763 /* X ^ (X | Y) -> Y & ~X */
10764 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10765 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10767 tree t2 = TREE_OPERAND (arg1, 1);
10768 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10770 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10771 fold_convert_loc (loc, type, t2),
10772 fold_convert_loc (loc, type, t1));
10776 /* X ^ (Y | X) -> Y & ~X */
10777 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10778 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10780 tree t2 = TREE_OPERAND (arg1, 0);
10781 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10783 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10784 fold_convert_loc (loc, type, t2),
10785 fold_convert_loc (loc, type, t1));
10789 /* Convert ~X ^ ~Y to X ^ Y. */
10790 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10791 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10792 return fold_build2_loc (loc, code, type,
10793 fold_convert_loc (loc, type,
10794 TREE_OPERAND (arg0, 0)),
10795 fold_convert_loc (loc, type,
10796 TREE_OPERAND (arg1, 0)));
10798 /* Convert ~X ^ C to X ^ ~C. */
10799 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10800 && TREE_CODE (arg1) == INTEGER_CST)
10801 return fold_build2_loc (loc, code, type,
10802 fold_convert_loc (loc, type,
10803 TREE_OPERAND (arg0, 0)),
10804 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10806 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10807 if (TREE_CODE (arg0) == BIT_AND_EXPR
10808 && integer_onep (TREE_OPERAND (arg0, 1))
10809 && integer_onep (arg1))
10810 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10811 build_int_cst (TREE_TYPE (arg0), 0));
10813 /* Fold (X & Y) ^ Y as ~X & Y. */
10814 if (TREE_CODE (arg0) == BIT_AND_EXPR
10815 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10817 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10818 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10819 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10820 fold_convert_loc (loc, type, arg1));
10822 /* Fold (X & Y) ^ X as ~Y & X. */
10823 if (TREE_CODE (arg0) == BIT_AND_EXPR
10824 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10825 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10827 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10828 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10829 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10830 fold_convert_loc (loc, type, arg1));
10832 /* Fold X ^ (X & Y) as X & ~Y. */
10833 if (TREE_CODE (arg1) == BIT_AND_EXPR
10834 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10836 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10837 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10838 fold_convert_loc (loc, type, arg0),
10839 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10841 /* Fold X ^ (Y & X) as ~Y & X. */
10842 if (TREE_CODE (arg1) == BIT_AND_EXPR
10843 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10844 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10846 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10847 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10848 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10849 fold_convert_loc (loc, type, arg0));
10852 /* See if this can be simplified into a rotate first. If that
10853 is unsuccessful, continue in the association code. */
10857 if (integer_all_onesp (arg1))
10858 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10859 if (integer_zerop (arg1))
10860 return omit_one_operand_loc (loc, type, arg1, arg0);
10861 if (operand_equal_p (arg0, arg1, 0))
10862 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10864 /* ~X & X is always zero. */
10865 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10866 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10867 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10869 /* X & ~X is always zero. */
10870 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10871 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10872 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10874 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10875 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10876 && TREE_CODE (arg1) == INTEGER_CST
10877 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10879 tree tmp1 = fold_convert_loc (loc, type, arg1);
10880 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10881 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10882 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10883 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10885 fold_convert_loc (loc, type,
10886 fold_build2_loc (loc, BIT_IOR_EXPR,
10887 type, tmp2, tmp3));
10890 /* (X | Y) & Y is (X, Y). */
10891 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10892 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10893 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10894 /* (X | Y) & X is (Y, X). */
10895 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10896 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10897 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10898 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10899 /* X & (X | Y) is (Y, X). */
10900 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10901 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10902 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10903 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10904 /* X & (Y | X) is (Y, X). */
10905 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10906 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10907 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10908 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10910 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10911 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10912 && integer_onep (TREE_OPERAND (arg0, 1))
10913 && integer_onep (arg1))
10915 tem = TREE_OPERAND (arg0, 0);
10916 return fold_build2_loc (loc, EQ_EXPR, type,
10917 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10918 build_int_cst (TREE_TYPE (tem), 1)),
10919 build_int_cst (TREE_TYPE (tem), 0));
10921 /* Fold ~X & 1 as (X & 1) == 0. */
10922 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10923 && integer_onep (arg1))
10925 tem = TREE_OPERAND (arg0, 0);
10926 return fold_build2_loc (loc, EQ_EXPR, type,
10927 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10928 build_int_cst (TREE_TYPE (tem), 1)),
10929 build_int_cst (TREE_TYPE (tem), 0));
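/* [Editorial illustration, not in the original source: the low bit
   of ~X is the complement of the low bit of X, so ~X & 1 is 1
   exactly when X is even, i.e. when (X & 1) == 0.]  */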
10932 /* Fold (X ^ Y) & Y as ~X & Y. */
10933 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10934 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10936 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10937 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10938 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10939 fold_convert_loc (loc, type, arg1));
10941 /* Fold (X ^ Y) & X as ~Y & X. */
10942 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10943 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10944 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10946 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10947 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10948 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10949 fold_convert_loc (loc, type, arg1));
10951 /* Fold X & (X ^ Y) as X & ~Y. */
10952 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10953 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10955 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10956 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10957 fold_convert_loc (loc, type, arg0),
10958 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10960 /* Fold X & (Y ^ X) as ~Y & X. */
10961 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10962 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10963 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10965 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10966 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10967 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10968 fold_convert_loc (loc, type, arg0));
10971 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10972 ((A & N) + B) & M -> (A + B) & M
10973 Similarly if (N & M) == 0,
10974 ((A | N) + B) & M -> (A + B) & M
10975 and for - instead of + (or unary - instead of +)
10976 and/or ^ instead of |.
10977 If B is constant and (B & M) == 0, fold into A & M. */
10978 if (host_integerp (arg1, 1))
10980 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
10981 if (~cst1 && (cst1 & (cst1 + 1)) == 0
10982 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10983 && (TREE_CODE (arg0) == PLUS_EXPR
10984 || TREE_CODE (arg0) == MINUS_EXPR
10985 || TREE_CODE (arg0) == NEGATE_EXPR)
10986 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10987 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10991 unsigned HOST_WIDE_INT cst0;
10993 /* Now we know that arg0 is (C + D) or (C - D) or
10994 -C and arg1 (M) == (1LL << cst) - 1.
10995 Store C into PMOP[0] and D into PMOP[1]. */
10996 pmop[0] = TREE_OPERAND (arg0, 0);
10998 if (TREE_CODE (arg0) != NEGATE_EXPR)
11000 pmop[1] = TREE_OPERAND (arg0, 1);
11004 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11005 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11009 for (; which >= 0; which--)
11010 switch (TREE_CODE (pmop[which]))
11015 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11018 /* tree_low_cst is not used, because we don't care about the upper bits. */
11020 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11022 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11027 else if (cst0 != 0)
11029 /* If C or D is of the form (A & N) where
11030 (N & M) == M, or of the form (A | N) or
11031 (A ^ N) where (N & M) == 0, replace it with A. */
11032 pmop[which] = TREE_OPERAND (pmop[which], 0);
11035 /* If C or D is an N where (N & M) == 0, it can be
11036 omitted (assumed 0). */
11037 if ((TREE_CODE (arg0) == PLUS_EXPR
11038 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11039 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11040 pmop[which] = NULL;
11046 /* Only build anything new if we optimized one or both arguments above. */
11048 if (pmop[0] != TREE_OPERAND (arg0, 0)
11049 || (TREE_CODE (arg0) != NEGATE_EXPR
11050 && pmop[1] != TREE_OPERAND (arg0, 1)))
11052 tree utype = TREE_TYPE (arg0);
11053 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11055 /* Perform the operations in a type that has defined
11056 overflow behavior. */
11057 utype = unsigned_type_for (TREE_TYPE (arg0));
11058 if (pmop[0] != NULL)
11059 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11060 if (pmop[1] != NULL)
11061 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11064 if (TREE_CODE (arg0) == NEGATE_EXPR)
11065 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11066 else if (TREE_CODE (arg0) == PLUS_EXPR)
11068 if (pmop[0] != NULL && pmop[1] != NULL)
11069 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11071 else if (pmop[0] != NULL)
11073 else if (pmop[1] != NULL)
11076 return build_int_cst (type, 0);
11078 else if (pmop[0] == NULL)
11079 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11081 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11083 /* TEM is now the new binary +, - or unary - replacement. */
11084 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11085 fold_convert_loc (loc, utype, arg1));
11086 return fold_convert_loc (loc, type, tem);
11091 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11092 if (t1 != NULL_TREE)
11094 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11095 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11096 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11099 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11101 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11102 && (~TREE_INT_CST_LOW (arg1)
11103 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11105 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11108 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11110 This results in more efficient code for machines without a NOR
11111 instruction. Combine will canonicalize to the first form
11112 which will allow use of NOR instructions provided by the
11113 backend if they exist. */
11114 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11115 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11117 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11118 build2 (BIT_IOR_EXPR, type,
11119 fold_convert_loc (loc, type,
11120 TREE_OPERAND (arg0, 0)),
11121 fold_convert_loc (loc, type,
11122 TREE_OPERAND (arg1, 0))));
11125 /* If arg0 is derived from the address of an object or function, we may
11126 be able to fold this expression using the object or function's alignment. */
11128 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11130 unsigned HOST_WIDE_INT modulus, residue;
11131 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11133 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11134 integer_onep (arg1));
11136 /* This works because modulus is a power of 2. If this weren't the
11137 case, we'd have to replace it by its greatest power-of-2
11138 divisor: modulus & -modulus. */
11140 return build_int_cst (type, residue & low);
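/* [Editorial illustration, not in the original source: if arg0 is
   known to be 16-byte aligned (modulus == 16, residue == 0), then
   arg0 & 15 folds to the constant 0 without knowing the pointer's
   value.]  */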
11143 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11144 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11145 if the new mask might be further optimized. */
11146 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11147 || TREE_CODE (arg0) == RSHIFT_EXPR)
11148 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11149 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11150 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11151 < TYPE_PRECISION (TREE_TYPE (arg0))
11152 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11153 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11155 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11156 unsigned HOST_WIDE_INT mask
11157 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11158 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11159 tree shift_type = TREE_TYPE (arg0);
11161 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11162 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11163 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11164 && TYPE_PRECISION (TREE_TYPE (arg0))
11165 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11167 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11168 tree arg00 = TREE_OPERAND (arg0, 0);
11169 /* See if more bits can be proven as zero because of zero extension. */
11171 if (TREE_CODE (arg00) == NOP_EXPR
11172 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11174 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11175 if (TYPE_PRECISION (inner_type)
11176 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11177 && TYPE_PRECISION (inner_type) < prec)
11179 prec = TYPE_PRECISION (inner_type);
11180 /* See if we can shorten the right shift. */
11182 shift_type = inner_type;
11185 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11186 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11187 zerobits <<= prec - shiftc;
11188 /* For an arithmetic shift, if the sign bit could be set, zerobits
11189 can actually contain sign bits, so no transformation is
11190 possible, unless MASK masks them all away. In that
11191 case the shift needs to be converted into a logical shift. */
11192 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11193 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11195 if ((mask & zerobits) == 0)
11196 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11202 /* ((X << 16) & 0xff00) is (X, 0). */
11203 if ((mask & zerobits) == mask)
11204 return omit_one_operand_loc (loc, type,
11205 build_int_cst (type, 0), arg0);
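/* [Editorial illustration, not in the original source: in
   (X << 16) & 0xff00 every bit selected by the mask was shifted in
   as zero, so the whole expression is 0 regardless of X.]  */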
11207 newmask = mask | zerobits;
11208 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11212 /* Only do the transformation if NEWMASK is some integer mode's mask. */
11214 for (prec = BITS_PER_UNIT;
11215 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11216 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11218 if (prec < HOST_BITS_PER_WIDE_INT
11219 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11223 if (shift_type != TREE_TYPE (arg0))
11225 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11226 fold_convert_loc (loc, shift_type,
11227 TREE_OPERAND (arg0, 0)),
11228 TREE_OPERAND (arg0, 1));
11229 tem = fold_convert_loc (loc, type, tem);
11233 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11234 if (!tree_int_cst_equal (newmaskt, arg1))
11235 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11243 /* Don't touch a floating-point divide by zero unless the mode
11244 of the constant can represent infinity. */
11245 if (TREE_CODE (arg1) == REAL_CST
11246 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11247 && real_zerop (arg1))
11250 /* Optimize A / A to 1.0 if we don't care about
11251 NaNs or Infinities. Skip the transformation
11252 for non-real operands. */
11253 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11254 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11255 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11256 && operand_equal_p (arg0, arg1, 0))
11258 tree r = build_real (TREE_TYPE (arg0), dconst1);
11260 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11263 /* The complex version of the above A / A optimization. */
11264 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11265 && operand_equal_p (arg0, arg1, 0))
11267 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11268 if (! HONOR_NANS (TYPE_MODE (elem_type))
11269 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11271 tree r = build_real (elem_type, dconst1);
11272 /* omit_two_operands will call fold_convert for us. */
11273 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11277 /* (-A) / (-B) -> A / B */
11278 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11279 return fold_build2_loc (loc, RDIV_EXPR, type,
11280 TREE_OPERAND (arg0, 0),
11281 negate_expr (arg1));
11282 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11283 return fold_build2_loc (loc, RDIV_EXPR, type,
11284 negate_expr (arg0),
11285 TREE_OPERAND (arg1, 0));
11287 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11288 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11289 && real_onep (arg1))
11290 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11292 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11293 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11294 && real_minus_onep (arg1))
11295 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11296 negate_expr (arg0)));
11298 /* If ARG1 is a constant, we can convert this to a multiply by the
11299 reciprocal. This does not have the same rounding properties,
11300 so only do this if -freciprocal-math. We can actually
11301 always safely do it if ARG1 is a power of two, but it's hard to
11302 tell if it is or not in a portable manner. */
11303 if (TREE_CODE (arg1) == REAL_CST)
11305 if (flag_reciprocal_math
11306 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11308 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
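/* [Editorial illustration, not in the original source: under
   -freciprocal-math, x / 2.0 becomes x * 0.5; for a power-of-two
   divisor the reciprocal is exact, so no precision is lost in this
   particular case.]  */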
11309 /* Find the reciprocal if optimizing and the result is exact. */
11313 r = TREE_REAL_CST (arg1);
11314 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11316 tem = build_real (type, r);
11317 return fold_build2_loc (loc, MULT_EXPR, type,
11318 fold_convert_loc (loc, type, arg0), tem);
11322 /* Convert A/B/C to A/(B*C). */
11323 if (flag_reciprocal_math
11324 && TREE_CODE (arg0) == RDIV_EXPR)
11325 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11326 fold_build2_loc (loc, MULT_EXPR, type,
11327 TREE_OPERAND (arg0, 1), arg1));
11329 /* Convert A/(B/C) to (A/B)*C. */
11330 if (flag_reciprocal_math
11331 && TREE_CODE (arg1) == RDIV_EXPR)
11332 return fold_build2_loc (loc, MULT_EXPR, type,
11333 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11334 TREE_OPERAND (arg1, 0)),
11335 TREE_OPERAND (arg1, 1));
11337 /* Convert C1/(X*C2) into (C1/C2)/X. */
11338 if (flag_reciprocal_math
11339 && TREE_CODE (arg1) == MULT_EXPR
11340 && TREE_CODE (arg0) == REAL_CST
11341 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11343 tree tem = const_binop (RDIV_EXPR, arg0,
11344 TREE_OPERAND (arg1, 1));
11346 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11347 TREE_OPERAND (arg1, 0));
11350 if (flag_unsafe_math_optimizations)
11352 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11353 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11355 /* Optimize sin(x)/cos(x) as tan(x). */
11356 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11357 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11358 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11359 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11360 CALL_EXPR_ARG (arg1, 0), 0))
11362 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11364 if (tanfn != NULL_TREE)
11365 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11368 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11369 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11370 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11371 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11372 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11373 CALL_EXPR_ARG (arg1, 0), 0))
11375 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11377 if (tanfn != NULL_TREE)
11379 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11380 CALL_EXPR_ARG (arg0, 0));
11381 return fold_build2_loc (loc, RDIV_EXPR, type,
11382 build_real (type, dconst1), tmp);
11386 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11387 NaNs or Infinities. */
11388 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11389 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11390 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11392 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11393 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11395 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11396 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11397 && operand_equal_p (arg00, arg01, 0))
11399 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11401 if (cosfn != NULL_TREE)
11402 return build_call_expr_loc (loc, cosfn, 1, arg00);
11406 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11407 NaNs or Infinities. */
11408 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11409 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11410 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11412 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11413 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11415 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11416 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11417 && operand_equal_p (arg00, arg01, 0))
11419 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11421 if (cosfn != NULL_TREE)
11423 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11424 return fold_build2_loc (loc, RDIV_EXPR, type,
11425 build_real (type, dconst1),
11431 /* Optimize pow(x,c)/x as pow(x,c-1). */
11432 if (fcode0 == BUILT_IN_POW
11433 || fcode0 == BUILT_IN_POWF
11434 || fcode0 == BUILT_IN_POWL)
11436 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11437 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11438 if (TREE_CODE (arg01) == REAL_CST
11439 && !TREE_OVERFLOW (arg01)
11440 && operand_equal_p (arg1, arg00, 0))
11442 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11446 c = TREE_REAL_CST (arg01);
11447 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11448 arg = build_real (type, c);
11449 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11453 /* Optimize a/root(b/c) into a*root(c/b). */
11454 if (BUILTIN_ROOT_P (fcode1))
11456 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11458 if (TREE_CODE (rootarg) == RDIV_EXPR)
11460 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11461 tree b = TREE_OPERAND (rootarg, 0);
11462 tree c = TREE_OPERAND (rootarg, 1);
11464 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11466 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11467 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11471 /* Optimize x/expN(y) into x*expN(-y). */
11472 if (BUILTIN_EXPONENT_P (fcode1))
11474 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11475 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11476 arg1 = build_call_expr_loc (loc,
11478 fold_convert_loc (loc, type, arg));
11479 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11482 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11483 if (fcode1 == BUILT_IN_POW
11484 || fcode1 == BUILT_IN_POWF
11485 || fcode1 == BUILT_IN_POWL)
11487 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11488 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11489 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11490 tree neg11 = fold_convert_loc (loc, type,
11491 negate_expr (arg11));
11492 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11493 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11498 case TRUNC_DIV_EXPR:
11499 /* Optimize (X & (-A)) / A where A is a power of 2, to X >> log2(A). */
11501 if (TREE_CODE (arg0) == BIT_AND_EXPR
11502 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11503 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11505 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11506 arg1, TREE_OPERAND (arg0, 1));
11507 if (sum && integer_zerop (sum)) {
11508 unsigned long pow2;
11510 if (TREE_INT_CST_LOW (arg1))
11511 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11513 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11514 + HOST_BITS_PER_WIDE_INT;
11516 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11517 TREE_OPERAND (arg0, 0),
11518 build_int_cst (integer_type_node, pow2));
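/* [Editorial illustration, not in the original source: with A == 8,
   X & -8 clears the low three bits, so e.g. X == -13 gives
   (-13 & -8) / 8 == -16 / 8 == -2 == -13 >> 3.]  */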
11524 case FLOOR_DIV_EXPR:
11525 /* Simplify A / (B << N) where A and B are positive and B is
11526 a power of 2, to A >> (N + log2(B)). */
11527 strict_overflow_p = false;
11528 if (TREE_CODE (arg1) == LSHIFT_EXPR
11529 && (TYPE_UNSIGNED (type)
11530 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11532 tree sval = TREE_OPERAND (arg1, 0);
11533 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11535 tree sh_cnt = TREE_OPERAND (arg1, 1);
11536 unsigned long pow2;
11538 if (TREE_INT_CST_LOW (sval))
11539 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11541 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11542 + HOST_BITS_PER_WIDE_INT;
11544 if (strict_overflow_p)
11545 fold_overflow_warning (("assuming signed overflow does not "
11546 "occur when simplifying A / (B << N)"),
11547 WARN_STRICT_OVERFLOW_MISC);
11549 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11551 build_int_cst (TREE_TYPE (sh_cnt),
11553 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11554 fold_convert_loc (loc, type, arg0), sh_cnt);
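/* [Editorial illustration, not in the original source: with B == 4
   and N == 3, A / (4 << 3) == A / 32 == A >> 5 == A >> (3 + log2(4))
   for non-negative A.]  */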
11558 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11559 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11560 if (INTEGRAL_TYPE_P (type)
11561 && TYPE_UNSIGNED (type)
11562 && code == FLOOR_DIV_EXPR)
11563 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11567 case ROUND_DIV_EXPR:
11568 case CEIL_DIV_EXPR:
11569 case EXACT_DIV_EXPR:
11570 if (integer_onep (arg1))
11571 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11572 if (integer_zerop (arg1))
11574 /* X / -1 is -X. */
11575 if (!TYPE_UNSIGNED (type)
11576 && TREE_CODE (arg1) == INTEGER_CST
11577 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11578 && TREE_INT_CST_HIGH (arg1) == -1)
11579 return fold_convert_loc (loc, type, negate_expr (arg0));
11581 /* Convert -A / -B to A / B when the type is signed and overflow is undefined. */
11583 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11584 && TREE_CODE (arg0) == NEGATE_EXPR
11585 && negate_expr_p (arg1))
11587 if (INTEGRAL_TYPE_P (type))
11588 fold_overflow_warning (("assuming signed overflow does not occur "
11589 "when distributing negation across "
11591 WARN_STRICT_OVERFLOW_MISC);
11592 return fold_build2_loc (loc, code, type,
11593 fold_convert_loc (loc, type,
11594 TREE_OPERAND (arg0, 0)),
11595 fold_convert_loc (loc, type,
11596 negate_expr (arg1)));
11598 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11599 && TREE_CODE (arg1) == NEGATE_EXPR
11600 && negate_expr_p (arg0))
11602 if (INTEGRAL_TYPE_P (type))
11603 fold_overflow_warning (("assuming signed overflow does not occur "
11604 "when distributing negation across "
11606 WARN_STRICT_OVERFLOW_MISC);
11607 return fold_build2_loc (loc, code, type,
11608 fold_convert_loc (loc, type,
11609 negate_expr (arg0)),
11610 fold_convert_loc (loc, type,
11611 TREE_OPERAND (arg1, 0)));
11614 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11615 operation, EXACT_DIV_EXPR.
11617 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11618 At one time others generated faster code, it's not clear if they do
11619 after the last round of changes to the DIV code in expmed.c. */
11620 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11621 && multiple_of_p (type, arg0, arg1))
11622 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11624 strict_overflow_p = false;
11625 if (TREE_CODE (arg1) == INTEGER_CST
11626 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11627 &strict_overflow_p)))
11629 if (strict_overflow_p)
11630 fold_overflow_warning (("assuming signed overflow does not occur "
11631 "when simplifying division"),
11632 WARN_STRICT_OVERFLOW_MISC);
11633 return fold_convert_loc (loc, type, tem);
11638 case CEIL_MOD_EXPR:
11639 case FLOOR_MOD_EXPR:
11640 case ROUND_MOD_EXPR:
11641 case TRUNC_MOD_EXPR:
11642 /* X % 1 is always zero, but be sure to preserve any side effects in X. */
11644 if (integer_onep (arg1))
11645 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11647 /* X % 0, return X % 0 unchanged so that we can get the
11648 proper warnings and errors. */
11649 if (integer_zerop (arg1))
11652 /* 0 % X is always zero, but be sure to preserve any side
11653 effects in X. Place this after checking for X == 0. */
11654 if (integer_zerop (arg0))
11655 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11657 /* X % -1 is zero. */
11658 if (!TYPE_UNSIGNED (type)
11659 && TREE_CODE (arg1) == INTEGER_CST
11660 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11661 && TREE_INT_CST_HIGH (arg1) == -1)
11662 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11664 /* X % -C is the same as X % C. */
11665 if (code == TRUNC_MOD_EXPR
11666 && !TYPE_UNSIGNED (type)
11667 && TREE_CODE (arg1) == INTEGER_CST
11668 && !TREE_OVERFLOW (arg1)
11669 && TREE_INT_CST_HIGH (arg1) < 0
11670 && !TYPE_OVERFLOW_TRAPS (type)
11671 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11672 && !sign_bit_p (arg1, arg1))
11673 return fold_build2_loc (loc, code, type,
11674 fold_convert_loc (loc, type, arg0),
11675 fold_convert_loc (loc, type,
11676 negate_expr (arg1)));
11678 /* X % -Y is the same as X % Y. */
11679 if (code == TRUNC_MOD_EXPR
11680 && !TYPE_UNSIGNED (type)
11681 && TREE_CODE (arg1) == NEGATE_EXPR
11682 && !TYPE_OVERFLOW_TRAPS (type))
11683 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11684 fold_convert_loc (loc, type,
11685 TREE_OPERAND (arg1, 0)));
11687 strict_overflow_p = false;
11688 if (TREE_CODE (arg1) == INTEGER_CST
11689 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11690 &strict_overflow_p)))
11692 if (strict_overflow_p)
11693 fold_overflow_warning (("assuming signed overflow does not occur "
11694 "when simplifying modulus"),
11695 WARN_STRICT_OVERFLOW_MISC);
11696 return fold_convert_loc (loc, type, tem);
11699 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11700 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11701 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11702 && (TYPE_UNSIGNED (type)
11703 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11706 /* Also optimize A % (C << N) where C is a power of 2,
11707 to A & ((C << N) - 1). */
11708 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11709 c = TREE_OPERAND (arg1, 0);
11711 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11714 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11715 build_int_cst (TREE_TYPE (arg1), 1));
11716 if (strict_overflow_p)
11717 fold_overflow_warning (("assuming signed overflow does not "
11718 "occur when simplifying "
11719 "X % (power of two)"),
11720 WARN_STRICT_OVERFLOW_MISC);
11721 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11722 fold_convert_loc (loc, type, arg0),
11723 fold_convert_loc (loc, type, mask));
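      /* Illustrative examples: for unsigned x, x % 8 folds to x & 7, and
	 x % (4 << n) folds to x & ((4 << n) - 1), replacing a division
	 with a cheap bitwise AND.  */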
11731 if (integer_all_onesp (arg0))
11732 return omit_one_operand_loc (loc, type, arg0, arg1);
11736 /* Optimize -1 >> x for arithmetic right shifts. */
11737 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11738 && tree_expr_nonnegative_p (arg1))
11739 return omit_one_operand_loc (loc, type, arg0, arg1);
11740 /* ... fall through ... */
11744 if (integer_zerop (arg1))
11745 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11746 if (integer_zerop (arg0))
11747 return omit_one_operand_loc (loc, type, arg0, arg1);
11749 /* Since a negative shift count is not well-defined,
11750 don't try to compute it in the compiler. */
11751 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11754 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11755 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11756 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11757 && host_integerp (TREE_OPERAND (arg0, 1), false)
11758 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11760 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11761 + TREE_INT_CST_LOW (arg1));
11763 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11764 being well defined. */
11765 if (low >= TYPE_PRECISION (type))
11767 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11768 low = low % TYPE_PRECISION (type);
11769 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11770 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11771 TREE_OPERAND (arg0, 0));
11773 low = TYPE_PRECISION (type) - 1;
11776 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11777 build_int_cst (type, low));
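      /* Illustrative example: on a 32-bit type, (x << 3) << 5 folds to
	 x << 8.  A combined count of 35 would be reduced modulo 32 for
	 rotates, fold to 0 for unsigned or left shifts, and be clamped
	 to 31 for a signed right shift.  */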
11780 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11781 into x & ((unsigned)-1 >> c) for unsigned types. */
11782 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11783 || (TYPE_UNSIGNED (type)
11784 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11785 && host_integerp (arg1, false)
11786 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11787 && host_integerp (TREE_OPERAND (arg0, 1), false)
11788 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11790 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11791 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11797 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11799 lshift = build_int_cst (type, -1);
11800 lshift = int_const_binop (code, lshift, arg1);
11802 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11806 /* Rewrite an LROTATE_EXPR by a constant into an
11807 RROTATE_EXPR by a new constant. */
11808 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11810 tree tem = build_int_cst (TREE_TYPE (arg1),
11811 TYPE_PRECISION (type));
11812 tem = const_binop (MINUS_EXPR, tem, arg1);
11813 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
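      /* Illustrative example: a 32-bit rotate-left by 8 becomes a
	 rotate-right by 24, so later code only has to handle one rotate
	 direction.  */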
11816 /* If we have a rotate of a bit operation with the rotate count and
11817 the second operand of the bit operation both constant,
11818 permute the two operations. */
11819 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11820 && (TREE_CODE (arg0) == BIT_AND_EXPR
11821 || TREE_CODE (arg0) == BIT_IOR_EXPR
11822 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11823 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11824 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11825 fold_build2_loc (loc, code, type,
11826 TREE_OPERAND (arg0, 0), arg1),
11827 fold_build2_loc (loc, code, type,
11828 TREE_OPERAND (arg0, 1), arg1));
11830 /* Two consecutive rotates adding up to the precision of the
11831 type can be ignored. */
11832 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11833 && TREE_CODE (arg0) == RROTATE_EXPR
11834 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11835 && TREE_INT_CST_HIGH (arg1) == 0
11836 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11837 && ((TREE_INT_CST_LOW (arg1)
11838 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11839 == (unsigned int) TYPE_PRECISION (type)))
11840 return TREE_OPERAND (arg0, 0);
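      /* Illustrative example: rotating a 32-bit value right by 13 and
	 then by 19 is a full 32-bit rotation, i.e. the identity.  */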
11842 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11843 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11844 if the latter can be further optimized. */
11845 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11846 && TREE_CODE (arg0) == BIT_AND_EXPR
11847 && TREE_CODE (arg1) == INTEGER_CST
11848 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11850 tree mask = fold_build2_loc (loc, code, type,
11851 fold_convert_loc (loc, type,
11852 TREE_OPERAND (arg0, 1)),
11854 tree shift = fold_build2_loc (loc, code, type,
11855 fold_convert_loc (loc, type,
11856 TREE_OPERAND (arg0, 0)),
11858 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11866 if (operand_equal_p (arg0, arg1, 0))
11867 return omit_one_operand_loc (loc, type, arg0, arg1);
11868 if (INTEGRAL_TYPE_P (type)
11869 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11870 return omit_one_operand_loc (loc, type, arg1, arg0);
11871 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11877 if (operand_equal_p (arg0, arg1, 0))
11878 return omit_one_operand_loc (loc, type, arg0, arg1);
11879 if (INTEGRAL_TYPE_P (type)
11880 && TYPE_MAX_VALUE (type)
11881 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11882 return omit_one_operand_loc (loc, type, arg1, arg0);
11883 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11888 case TRUTH_ANDIF_EXPR:
11889 /* Note that the operands of this must be ints
11890 and their values must be 0 or 1.
11891 ("true" is a fixed value perhaps depending on the language.) */
11892 /* If first arg is constant zero, return it. */
11893 if (integer_zerop (arg0))
11894 return fold_convert_loc (loc, type, arg0);
11895 case TRUTH_AND_EXPR:
11896 /* If either arg is constant true, drop it. */
11897 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11898 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11899 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11900 /* Preserve sequence points. */
11901 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11902 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11903 /* If second arg is constant zero, result is zero, but first arg
11904 must be evaluated. */
11905 if (integer_zerop (arg1))
11906 return omit_one_operand_loc (loc, type, arg1, arg0);
11907 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11908 case will be handled here. */
11909 if (integer_zerop (arg0))
11910 return omit_one_operand_loc (loc, type, arg0, arg1);
11912 /* !X && X is always false. */
11913 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11914 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11915 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11916 /* X && !X is always false. */
11917 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11918 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11919 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11921 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11922 means A >= Y && A != MAX, but in this case we know that A < X <= MAX. */
11925 if (!TREE_SIDE_EFFECTS (arg0)
11926 && !TREE_SIDE_EFFECTS (arg1))
11928 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11929 if (tem && !operand_equal_p (tem, arg0, 0))
11930 return fold_build2_loc (loc, code, type, tem, arg1);
11932 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11933 if (tem && !operand_equal_p (tem, arg1, 0))
11934 return fold_build2_loc (loc, code, type, arg0, tem);
11938 /* We only do these simplifications if we are optimizing. */
11942 /* Check for things like (A || B) && (A || C). We can convert this
11943 to A || (B && C). Note that either operator can be any of the four
11944 truth and/or operations and the transformation will still be
11945 valid. Also note that we only care about order for the
11946 ANDIF and ORIF operators. If B contains side effects, this
11947 might change the truth-value of A. */
11948 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11949 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11950 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11951 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11952 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11953 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11955 tree a00 = TREE_OPERAND (arg0, 0);
11956 tree a01 = TREE_OPERAND (arg0, 1);
11957 tree a10 = TREE_OPERAND (arg1, 0);
11958 tree a11 = TREE_OPERAND (arg1, 1);
11959 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11960 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11961 && (code == TRUTH_AND_EXPR
11962 || code == TRUTH_OR_EXPR));
11964 if (operand_equal_p (a00, a10, 0))
11965 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11966 fold_build2_loc (loc, code, type, a01, a11));
11967 else if (commutative && operand_equal_p (a00, a11, 0))
11968 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11969 fold_build2_loc (loc, code, type, a01, a10));
11970 else if (commutative && operand_equal_p (a01, a10, 0))
11971 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
11972 fold_build2_loc (loc, code, type, a00, a11));
11974 /* This case is tricky because we must either have commutative
11975 operators or else A10 must not have side-effects. */
11977 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11978 && operand_equal_p (a01, a11, 0))
11979 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11980 fold_build2_loc (loc, code, type, a00, a10),
11984 /* See if we can build a range comparison. */
11985 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
11988 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
11989 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
11991 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
11993 return fold_build2_loc (loc, code, type, tem, arg1);
11996 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
11997 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
11999 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
12001 return fold_build2_loc (loc, code, type, arg0, tem);
12004 /* Check for the possibility of merging component references. If our
12005 lhs is another similar operation, try to merge its rhs with our
12006 rhs. Then try to merge our lhs and rhs. */
12007 if (TREE_CODE (arg0) == code
12008 && 0 != (tem = fold_truthop (loc, code, type,
12009 TREE_OPERAND (arg0, 1), arg1)))
12010 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12012 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12017 case TRUTH_ORIF_EXPR:
12018 /* Note that the operands of this must be ints
12019 and their values must be 0 or true.
12020 ("true" is a fixed value perhaps depending on the language.) */
12021 /* If first arg is constant true, return it. */
12022 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12023 return fold_convert_loc (loc, type, arg0);
12024 case TRUTH_OR_EXPR:
12025 /* If either arg is constant zero, drop it. */
12026 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12027 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12028 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12029 /* Preserve sequence points. */
12030 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12031 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12032 /* If second arg is constant true, result is true, but we must
12033 evaluate first arg. */
12034 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12035 return omit_one_operand_loc (loc, type, arg1, arg0);
12036 /* Likewise for first arg, but note this only occurs here for TRUTH_OR_EXPR. */
12038 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12039 return omit_one_operand_loc (loc, type, arg0, arg1);
12041 /* !X || X is always true. */
12042 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12043 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12044 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12045 /* X || !X is always true. */
12046 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12047 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12048 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12050 /* (X && !Y) || (!X && Y) is X ^ Y */
12051 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12052 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12054 tree a0, a1, l0, l1, n0, n1;
12056 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12057 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12059 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12060 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12062 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12063 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12065 if ((operand_equal_p (n0, a0, 0)
12066 && operand_equal_p (n1, a1, 0))
12067 || (operand_equal_p (n0, a1, 0)
12068 && operand_equal_p (n1, a0, 0)))
12069 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
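      /* Illustrative check: for truth values x and y, (x && !y) || (!x && y)
	 is true exactly when x and y differ, which is x ^ y.  */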
12073 case TRUTH_XOR_EXPR:
12074 /* If the second arg is constant zero, drop it. */
12075 if (integer_zerop (arg1))
12076 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12077 /* If the second arg is constant true, this is a logical inversion. */
12078 if (integer_onep (arg1))
12080 /* Only call invert_truthvalue if operand is a truth value. */
12081 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12082 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12084 tem = invert_truthvalue_loc (loc, arg0);
12085 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12087 /* Identical arguments cancel to zero. */
12088 if (operand_equal_p (arg0, arg1, 0))
12089 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12091 /* !X ^ X is always true. */
12092 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12093 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12094 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12096 /* X ^ !X is always true. */
12097 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12098 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12099 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12108 tem = fold_comparison (loc, code, type, op0, op1);
12109 if (tem != NULL_TREE)
12112 /* bool_var != 0 becomes bool_var. */
12113 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12114 && code == NE_EXPR)
12115 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12117 /* bool_var == 1 becomes bool_var. */
12118 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12119 && code == EQ_EXPR)
12120 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12122 /* bool_var != 1 becomes !bool_var. */
12123 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12124 && code == NE_EXPR)
12125 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12126 fold_convert_loc (loc, type, arg0));
12128 /* bool_var == 0 becomes !bool_var. */
12129 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12130 && code == EQ_EXPR)
12131 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12132 fold_convert_loc (loc, type, arg0));
12134 /* !exp != 0 becomes !exp */
12135 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12136 && code == NE_EXPR)
12137 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12139 /* If this is an equality comparison of the address of two non-weak,
12140 unaliased symbols, neither of which is extern (since we do not
12141 have access to attributes for externs), then we know the result. */
12142 if (TREE_CODE (arg0) == ADDR_EXPR
12143 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12144 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12145 && ! lookup_attribute ("alias",
12146 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12147 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12148 && TREE_CODE (arg1) == ADDR_EXPR
12149 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12150 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12151 && ! lookup_attribute ("alias",
12152 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12153 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12155 /* We know that we're looking at the address of two
12156 non-weak, unaliased, static _DECL nodes.
12158 It is both wasteful and incorrect to call operand_equal_p
12159 to compare the two ADDR_EXPR nodes. It is wasteful in that
12160 all we need to do is test pointer equality for the arguments
12161 to the two ADDR_EXPR nodes. It is incorrect to use
12162 operand_equal_p as that function is NOT equivalent to a
12163 C equality test. It can in fact return false for two
12164 objects which would test as equal using the C equality operator. */
12166 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12167 return constant_boolean_node (equal
12168 ? code == EQ_EXPR : code != EQ_EXPR,
12172 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12173 a MINUS_EXPR of a constant, we can convert it into a comparison with
12174 a revised constant as long as no overflow occurs. */
12175 if (TREE_CODE (arg1) == INTEGER_CST
12176 && (TREE_CODE (arg0) == PLUS_EXPR
12177 || TREE_CODE (arg0) == MINUS_EXPR)
12178 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12179 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12180 ? MINUS_EXPR : PLUS_EXPR,
12181 fold_convert_loc (loc, TREE_TYPE (arg0),
12183 TREE_OPERAND (arg0, 1)))
12184 && !TREE_OVERFLOW (tem))
12185 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
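      /* Illustrative example: x + 9 == 4 folds to x == -5, provided the
	 adjusted constant does not overflow.  */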
12187 /* Similarly for a NEGATE_EXPR. */
12188 if (TREE_CODE (arg0) == NEGATE_EXPR
12189 && TREE_CODE (arg1) == INTEGER_CST
12190 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12192 && TREE_CODE (tem) == INTEGER_CST
12193 && !TREE_OVERFLOW (tem))
12194 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12196 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12197 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12198 && TREE_CODE (arg1) == INTEGER_CST
12199 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12200 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12201 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12202 fold_convert_loc (loc,
12205 TREE_OPERAND (arg0, 1)));
12207 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12208 if ((TREE_CODE (arg0) == PLUS_EXPR
12209 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12210 || TREE_CODE (arg0) == MINUS_EXPR)
12211 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12214 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12215 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12217 tree val = TREE_OPERAND (arg0, 1);
12218 return omit_two_operands_loc (loc, type,
12219 fold_build2_loc (loc, code, type,
12221 build_int_cst (TREE_TYPE (val),
12223 TREE_OPERAND (arg0, 0), arg1);
12226 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12227 if (TREE_CODE (arg0) == MINUS_EXPR
12228 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12229 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12232 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12234 return omit_two_operands_loc (loc, type,
12236 ? boolean_true_node : boolean_false_node,
12237 TREE_OPERAND (arg0, 1), arg1);
12240 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12241 for !=. Don't do this for ordered comparisons due to overflow. */
12242 if (TREE_CODE (arg0) == MINUS_EXPR
12243 && integer_zerop (arg1))
12244 return fold_build2_loc (loc, code, type,
12245 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12247 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12248 if (TREE_CODE (arg0) == ABS_EXPR
12249 && (integer_zerop (arg1) || real_zerop (arg1)))
12250 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12252 /* If this is an EQ or NE comparison with zero and ARG0 is
12253 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12254 two operations, but the latter can be done in one less insn
12255 on machines that have only two-operand insns or on which a
12256 constant cannot be the first operand. */
12257 if (TREE_CODE (arg0) == BIT_AND_EXPR
12258 && integer_zerop (arg1))
12260 tree arg00 = TREE_OPERAND (arg0, 0);
12261 tree arg01 = TREE_OPERAND (arg0, 1);
12262 if (TREE_CODE (arg00) == LSHIFT_EXPR
12263 && integer_onep (TREE_OPERAND (arg00, 0)))
12265 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12266 arg01, TREE_OPERAND (arg00, 1));
12267 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12268 build_int_cst (TREE_TYPE (arg0), 1));
12269 return fold_build2_loc (loc, code, type,
12270 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12273 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12274 && integer_onep (TREE_OPERAND (arg01, 0)))
12276 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12277 arg00, TREE_OPERAND (arg01, 1));
12278 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12279 build_int_cst (TREE_TYPE (arg0), 1));
12280 return fold_build2_loc (loc, code, type,
12281 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
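	  /* Illustrative example: ((1 << n) & x) != 0 becomes
	     ((x >> n) & 1) != 0; both cost a shift and an AND, but the
	     second form never needs the constant as a first operand.  */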
12286 /* If this is an NE or EQ comparison of zero against the result of a
12287 signed MOD operation whose second operand is a power of 2, make
12288 the MOD operation unsigned since it is simpler and equivalent. */
12289 if (integer_zerop (arg1)
12290 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12291 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12292 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12293 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12294 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12295 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12297 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12298 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12299 fold_convert_loc (loc, newtype,
12300 TREE_OPERAND (arg0, 0)),
12301 fold_convert_loc (loc, newtype,
12302 TREE_OPERAND (arg0, 1)));
12304 return fold_build2_loc (loc, code, type, newmod,
12305 fold_convert_loc (loc, newtype, arg1));
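	  /* Illustrative example: for signed x, x % 4 == 0 depends only on
	     the low two bits, so the remainder may be computed in the
	     unsigned type, where x % 4 is just x & 3.  */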
12308 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12309 C1 is a valid shift constant, and C2 is a power of two, i.e. a single bit. */
12311 if (TREE_CODE (arg0) == BIT_AND_EXPR
12312 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12313 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12315 && integer_pow2p (TREE_OPERAND (arg0, 1))
12316 && integer_zerop (arg1))
12318 tree itype = TREE_TYPE (arg0);
12319 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12320 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12322 /* Check for a valid shift count. */
12323 if (TREE_INT_CST_HIGH (arg001) == 0
12324 && TREE_INT_CST_LOW (arg001) < prec)
12326 tree arg01 = TREE_OPERAND (arg0, 1);
12327 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12328 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12329 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12330 can be rewritten as (X & (C2 << C1)) != 0. */
12331 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12333 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12334 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12335 return fold_build2_loc (loc, code, type, tem,
12336 fold_convert_loc (loc, itype, arg1));
12338 /* Otherwise, for signed (arithmetic) shifts,
12339 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12340 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12341 else if (!TYPE_UNSIGNED (itype))
12342 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12343 arg000, build_int_cst (itype, 0));
12344 /* Otherwise, for unsigned (logical) shifts,
12345 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12346 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12348 return omit_one_operand_loc (loc, type,
12349 code == EQ_EXPR ? integer_one_node
12350 : integer_zero_node,
12355 /* If this is an NE comparison of zero with an AND of one, remove the
12356 comparison since the AND will give the correct value. */
12357 if (code == NE_EXPR
12358 && integer_zerop (arg1)
12359 && TREE_CODE (arg0) == BIT_AND_EXPR
12360 && integer_onep (TREE_OPERAND (arg0, 1)))
12361 return fold_convert_loc (loc, type, arg0);
12363 /* If we have (A & C) == C where C is a power of 2, convert this into
12364 (A & C) != 0. Similarly for NE_EXPR. */
12365 if (TREE_CODE (arg0) == BIT_AND_EXPR
12366 && integer_pow2p (TREE_OPERAND (arg0, 1))
12367 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12368 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12369 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12370 integer_zero_node));
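      /* Illustrative example: (x & 8) == 8 folds to (x & 8) != 0; with a
	 single-bit mask the AND result is either 0 or 8, so the two tests
	 are equivalent.  */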
12372 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12373 bit, then fold the expression into A < 0 or A >= 0. */
12374 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12378 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12379 Similarly for NE_EXPR. */
12380 if (TREE_CODE (arg0) == BIT_AND_EXPR
12381 && TREE_CODE (arg1) == INTEGER_CST
12382 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12384 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12385 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12386 TREE_OPERAND (arg0, 1));
12388 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12389 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12391 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12392 if (integer_nonzerop (dandnotc))
12393 return omit_one_operand_loc (loc, type, rslt, arg0);
12396 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12397 Similarly for NE_EXPR. */
12398 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12399 && TREE_CODE (arg1) == INTEGER_CST
12400 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12402 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12404 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12405 TREE_OPERAND (arg0, 1),
12406 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12407 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12408 if (integer_nonzerop (candnotd))
12409 return omit_one_operand_loc (loc, type, rslt, arg0);
12412 /* If this is a comparison of a field, we may be able to simplify it. */
12413 if ((TREE_CODE (arg0) == COMPONENT_REF
12414 || TREE_CODE (arg0) == BIT_FIELD_REF)
12415 /* Handle the constant case even without -O
12416 to make sure the warnings are given. */
12417 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12419 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12424 /* Optimize comparisons of strlen vs zero to a compare of the
12425 first character of the string vs zero. To wit,
12426 strlen(ptr) == 0 => *ptr == 0
12427 strlen(ptr) != 0 => *ptr != 0
12428 Other cases should reduce to one of these two (or a constant)
12429 due to the return value of strlen being unsigned. */
12430 if (TREE_CODE (arg0) == CALL_EXPR
12431 && integer_zerop (arg1))
12433 tree fndecl = get_callee_fndecl (arg0);
12436 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12437 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12438 && call_expr_nargs (arg0) == 1
12439 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12441 tree iref = build_fold_indirect_ref_loc (loc,
12442 CALL_EXPR_ARG (arg0, 0));
12443 return fold_build2_loc (loc, code, type, iref,
12444 build_int_cst (TREE_TYPE (iref), 0));
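      /* Illustrative example: strlen (p) == 0 folds to *p == 0, avoiding
	 a call and a scan of the whole string.  */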
12448 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12449 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12450 if (TREE_CODE (arg0) == RSHIFT_EXPR
12451 && integer_zerop (arg1)
12452 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12454 tree arg00 = TREE_OPERAND (arg0, 0);
12455 tree arg01 = TREE_OPERAND (arg0, 1);
12456 tree itype = TREE_TYPE (arg00);
12457 if (TREE_INT_CST_HIGH (arg01) == 0
12458 && TREE_INT_CST_LOW (arg01)
12459 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12461 if (TYPE_UNSIGNED (itype))
12463 itype = signed_type_for (itype);
12464 arg00 = fold_convert_loc (loc, itype, arg00);
12466 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12467 type, arg00, build_int_cst (itype, 0));
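      /* Illustrative example: for 32-bit int x, (x >> 31) != 0 folds to
	 x < 0 and (x >> 31) == 0 to x >= 0, since an arithmetic shift by
	 width - 1 leaves only copies of the sign bit.  */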
12471 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12472 if (integer_zerop (arg1)
12473 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12474 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12475 TREE_OPERAND (arg0, 1));
12477 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12478 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12479 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12480 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12481 build_int_cst (TREE_TYPE (arg0), 0));
12482 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12483 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12484 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12485 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12486 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12487 build_int_cst (TREE_TYPE (arg0), 0));
12489 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12490 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12491 && TREE_CODE (arg1) == INTEGER_CST
12492 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12493 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12494 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12495 TREE_OPERAND (arg0, 1), arg1));
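      /* Illustrative example: (x ^ 5) == 3 folds to x == 6, because XOR
	 with a constant is an involution: x ^ 5 == 3 iff x == (5 ^ 3).  */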
12497 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12498 (X & C) == 0 when C is a single bit. */
12499 if (TREE_CODE (arg0) == BIT_AND_EXPR
12500 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12501 && integer_zerop (arg1)
12502 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12504 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12505 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12506 TREE_OPERAND (arg0, 1));
12507 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12509 fold_convert_loc (loc, TREE_TYPE (arg0),
12513 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12514 constant C is a power of two, i.e. a single bit. */
12515 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12516 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12517 && integer_zerop (arg1)
12518 && integer_pow2p (TREE_OPERAND (arg0, 1))
12519 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12520 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12522 tree arg00 = TREE_OPERAND (arg0, 0);
12523 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12524 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12527 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12528 when C is a power of two, i.e. a single bit.
12529 if (TREE_CODE (arg0) == BIT_AND_EXPR
12530 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12531 && integer_zerop (arg1)
12532 && integer_pow2p (TREE_OPERAND (arg0, 1))
12533 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12534 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12536 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12537 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12538 arg000, TREE_OPERAND (arg0, 1));
12539 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12540 tem, build_int_cst (TREE_TYPE (tem), 0));
12543 if (integer_zerop (arg1)
12544 && tree_expr_nonzero_p (arg0))
12546 tree res = constant_boolean_node (code == NE_EXPR, type);
12547 return omit_one_operand_loc (loc, type, res, arg0);
12550 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12551 if (TREE_CODE (arg0) == NEGATE_EXPR
12552 && TREE_CODE (arg1) == NEGATE_EXPR)
12553 return fold_build2_loc (loc, code, type,
12554 TREE_OPERAND (arg0, 0),
12555 fold_convert_loc (loc, TREE_TYPE (arg0),
12556 TREE_OPERAND (arg1, 0)));
12558 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12559 if (TREE_CODE (arg0) == BIT_AND_EXPR
12560 && TREE_CODE (arg1) == BIT_AND_EXPR)
12562 tree arg00 = TREE_OPERAND (arg0, 0);
12563 tree arg01 = TREE_OPERAND (arg0, 1);
12564 tree arg10 = TREE_OPERAND (arg1, 0);
12565 tree arg11 = TREE_OPERAND (arg1, 1);
12566 tree itype = TREE_TYPE (arg0);
12568 if (operand_equal_p (arg01, arg11, 0))
12569 return fold_build2_loc (loc, code, type,
12570 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12571 fold_build2_loc (loc,
12572 BIT_XOR_EXPR, itype,
12575 build_int_cst (itype, 0));
12577 if (operand_equal_p (arg01, arg10, 0))
12578 return fold_build2_loc (loc, code, type,
12579 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12580 fold_build2_loc (loc,
12581 BIT_XOR_EXPR, itype,
12584 build_int_cst (itype, 0));
12586 if (operand_equal_p (arg00, arg11, 0))
12587 return fold_build2_loc (loc, code, type,
12588 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12589 fold_build2_loc (loc,
12590 BIT_XOR_EXPR, itype,
12593 build_int_cst (itype, 0));
12595 if (operand_equal_p (arg00, arg10, 0))
12596 return fold_build2_loc (loc, code, type,
12597 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12598 fold_build2_loc (loc,
12599 BIT_XOR_EXPR, itype,
12602 build_int_cst (itype, 0));
12605 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12606 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12608 tree arg00 = TREE_OPERAND (arg0, 0);
12609 tree arg01 = TREE_OPERAND (arg0, 1);
12610 tree arg10 = TREE_OPERAND (arg1, 0);
12611 tree arg11 = TREE_OPERAND (arg1, 1);
12612 tree itype = TREE_TYPE (arg0);
12614 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12615 operand_equal_p guarantees no side-effects so we don't need
12616 to use omit_one_operand on Z. */
12617 if (operand_equal_p (arg01, arg11, 0))
12618 return fold_build2_loc (loc, code, type, arg00,
12619 fold_convert_loc (loc, TREE_TYPE (arg00),
12621 if (operand_equal_p (arg01, arg10, 0))
12622 return fold_build2_loc (loc, code, type, arg00,
12623 fold_convert_loc (loc, TREE_TYPE (arg00),
12625 if (operand_equal_p (arg00, arg11, 0))
12626 return fold_build2_loc (loc, code, type, arg01,
12627 fold_convert_loc (loc, TREE_TYPE (arg01),
12629 if (operand_equal_p (arg00, arg10, 0))
12630 return fold_build2_loc (loc, code, type, arg01,
12631 fold_convert_loc (loc, TREE_TYPE (arg01),
12634 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12635 if (TREE_CODE (arg01) == INTEGER_CST
12636 && TREE_CODE (arg11) == INTEGER_CST)
12638 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12639 fold_convert_loc (loc, itype, arg11));
12640 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12641 return fold_build2_loc (loc, code, type, tem,
12642 fold_convert_loc (loc, itype, arg10));
12646 /* Attempt to simplify equality/inequality comparisons of complex
12647 values. Only lower the comparison if the result is known or
12648 can be simplified to a single scalar comparison. */
12649 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12650 || TREE_CODE (arg0) == COMPLEX_CST)
12651 && (TREE_CODE (arg1) == COMPLEX_EXPR
12652 || TREE_CODE (arg1) == COMPLEX_CST))
12654 tree real0, imag0, real1, imag1;
12657 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12659 real0 = TREE_OPERAND (arg0, 0);
12660 imag0 = TREE_OPERAND (arg0, 1);
12664 real0 = TREE_REALPART (arg0);
12665 imag0 = TREE_IMAGPART (arg0);
12668 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12670 real1 = TREE_OPERAND (arg1, 0);
12671 imag1 = TREE_OPERAND (arg1, 1);
12675 real1 = TREE_REALPART (arg1);
12676 imag1 = TREE_IMAGPART (arg1);
12679 rcond = fold_binary_loc (loc, code, type, real0, real1);
12680 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12682 if (integer_zerop (rcond))
12684 if (code == EQ_EXPR)
12685 return omit_two_operands_loc (loc, type, boolean_false_node,
12687 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12691 if (code == NE_EXPR)
12692 return omit_two_operands_loc (loc, type, boolean_true_node,
12694 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12698 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12699 if (icond && TREE_CODE (icond) == INTEGER_CST)
12701 if (integer_zerop (icond))
12703 if (code == EQ_EXPR)
12704 return omit_two_operands_loc (loc, type, boolean_false_node,
12706 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12710 if (code == NE_EXPR)
12711 return omit_two_operands_loc (loc, type, boolean_true_node,
12713 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12724 tem = fold_comparison (loc, code, type, op0, op1);
12725 if (tem != NULL_TREE)
12728 /* Transform comparisons of the form X +- C CMP X. */
12729 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12730 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12731 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12732 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12733 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12734 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12736 tree arg01 = TREE_OPERAND (arg0, 1);
12737 enum tree_code code0 = TREE_CODE (arg0);
12740 if (TREE_CODE (arg01) == REAL_CST)
12741 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12743 is_positive = tree_int_cst_sgn (arg01);
12745 /* (X - c) > X becomes false. */
12746 if (code == GT_EXPR
12747 && ((code0 == MINUS_EXPR && is_positive >= 0)
12748 || (code0 == PLUS_EXPR && is_positive <= 0)))
12750 if (TREE_CODE (arg01) == INTEGER_CST
12751 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12752 fold_overflow_warning (("assuming signed overflow does not "
12753 "occur when assuming that (X - c) > X "
12754 "is always false"),
12755 WARN_STRICT_OVERFLOW_ALL);
12756 return constant_boolean_node (0, type);
12759 /* Likewise (X + c) < X becomes false. */
12760 if (code == LT_EXPR
12761 && ((code0 == PLUS_EXPR && is_positive >= 0)
12762 || (code0 == MINUS_EXPR && is_positive <= 0)))
12764 if (TREE_CODE (arg01) == INTEGER_CST
12765 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12766 fold_overflow_warning (("assuming signed overflow does not "
12767 "occur when assuming that "
12768 "(X + c) < X is always false"),
12769 WARN_STRICT_OVERFLOW_ALL);
12770 return constant_boolean_node (0, type);
12773 /* Convert (X - c) <= X to true. */
12774 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12776 && ((code0 == MINUS_EXPR && is_positive >= 0)
12777 || (code0 == PLUS_EXPR && is_positive <= 0)))
12779 if (TREE_CODE (arg01) == INTEGER_CST
12780 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12781 fold_overflow_warning (("assuming signed overflow does not "
12782 "occur when assuming that "
12783 "(X - c) <= X is always true"),
12784 WARN_STRICT_OVERFLOW_ALL);
12785 return constant_boolean_node (1, type);
12788 /* Convert (X + c) >= X to true. */
12789 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12791 && ((code0 == PLUS_EXPR && is_positive >= 0)
12792 || (code0 == MINUS_EXPR && is_positive <= 0)))
12794 if (TREE_CODE (arg01) == INTEGER_CST
12795 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12796 fold_overflow_warning (("assuming signed overflow does not "
12797 "occur when assuming that "
12798 "(X + c) >= X is always true"),
12799 WARN_STRICT_OVERFLOW_ALL);
12800 return constant_boolean_node (1, type);
12803 if (TREE_CODE (arg01) == INTEGER_CST)
12805 /* Convert X + c > X and X - c < X to true for integers. */
12806 if (code == GT_EXPR
12807 && ((code0 == PLUS_EXPR && is_positive > 0)
12808 || (code0 == MINUS_EXPR && is_positive < 0)))
12810 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12811 fold_overflow_warning (("assuming signed overflow does "
12812 "not occur when assuming that "
12813 "(X + c) > X is always true"),
12814 WARN_STRICT_OVERFLOW_ALL);
12815 return constant_boolean_node (1, type);
12818 if (code == LT_EXPR
12819 && ((code0 == MINUS_EXPR && is_positive > 0)
12820 || (code0 == PLUS_EXPR && is_positive < 0)))
12822 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12823 fold_overflow_warning (("assuming signed overflow does "
12824 "not occur when assuming that "
12825 "(X - c) < X is always true"),
12826 WARN_STRICT_OVERFLOW_ALL);
12827 return constant_boolean_node (1, type);
12830 /* Convert X + c <= X and X - c >= X to false for integers. */
12831 if (code == LE_EXPR
12832 && ((code0 == PLUS_EXPR && is_positive > 0)
12833 || (code0 == MINUS_EXPR && is_positive < 0)))
12835 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12836 fold_overflow_warning (("assuming signed overflow does "
12837 "not occur when assuming that "
12838 "(X + c) <= X is always false"),
12839 WARN_STRICT_OVERFLOW_ALL);
12840 return constant_boolean_node (0, type);
12843 if (code == GE_EXPR
12844 && ((code0 == MINUS_EXPR && is_positive > 0)
12845 || (code0 == PLUS_EXPR && is_positive < 0)))
12847 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12848 fold_overflow_warning (("assuming signed overflow does "
12849 "not occur when assuming that "
12850 "(X - c) >= X is always false"),
12851 WARN_STRICT_OVERFLOW_ALL);
12852 return constant_boolean_node (0, type);
12857 /* Comparisons with the highest or lowest possible integer of
12858 the specified precision will have known values. */
12860 tree arg1_type = TREE_TYPE (arg1);
12861 unsigned int width = TYPE_PRECISION (arg1_type);
12863 if (TREE_CODE (arg1) == INTEGER_CST
12864 && width <= 2 * HOST_BITS_PER_WIDE_INT
12865 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12867 HOST_WIDE_INT signed_max_hi;
12868 unsigned HOST_WIDE_INT signed_max_lo;
12869 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12871 if (width <= HOST_BITS_PER_WIDE_INT)
12873 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12878 if (TYPE_UNSIGNED (arg1_type))
12880 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12886 max_lo = signed_max_lo;
12887 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12893 width -= HOST_BITS_PER_WIDE_INT;
12894 signed_max_lo = -1;
12895 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12900 if (TYPE_UNSIGNED (arg1_type))
12902 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12907 max_hi = signed_max_hi;
12908 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12912 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12913 && TREE_INT_CST_LOW (arg1) == max_lo)
12917 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12920 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12923 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12926 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12928 /* The GE_EXPR and LT_EXPR cases above are not normally
12929 reached because of previous transformations. */
12934 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12936 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12940 arg1 = const_binop (PLUS_EXPR, arg1,
12941 build_int_cst (TREE_TYPE (arg1), 1));
12942 return fold_build2_loc (loc, EQ_EXPR, type,
12943 fold_convert_loc (loc,
12944 TREE_TYPE (arg1), arg0),
12947 arg1 = const_binop (PLUS_EXPR, arg1,
12948 build_int_cst (TREE_TYPE (arg1), 1));
12949 return fold_build2_loc (loc, NE_EXPR, type,
12950 fold_convert_loc (loc, TREE_TYPE (arg1),
12956 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12958 && TREE_INT_CST_LOW (arg1) == min_lo)
12962 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12965 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12968 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12971 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12976 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12978 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12982 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12983 return fold_build2_loc (loc, NE_EXPR, type,
12984 fold_convert_loc (loc,
12985 TREE_TYPE (arg1), arg0),
12988 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
12989 return fold_build2_loc (loc, EQ_EXPR, type,
12990 fold_convert_loc (loc, TREE_TYPE (arg1),
12997 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12998 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12999 && TYPE_UNSIGNED (arg1_type)
13000 /* We will flip the signedness of the comparison operator
13001 associated with the mode of arg1, so the sign bit is
13002 specified by this mode. Check that arg1 is the signed
13003 max associated with this sign bit. */
13004 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13005 /* signed_type does not work on pointer types. */
13006 && INTEGRAL_TYPE_P (arg1_type))
13008 /* The following case also applies to X < signed_max+1
13009 and X >= signed_max+1 because of previous transformations. */
13010 if (code == LE_EXPR || code == GT_EXPR)
13013 st = signed_type_for (TREE_TYPE (arg1));
13014 return fold_build2_loc (loc,
13015 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13016 type, fold_convert_loc (loc, st, arg0),
13017 build_int_cst (st, 0));
13023 /* If we are comparing an ABS_EXPR with a constant, we can
13024 convert all the cases into explicit comparisons, but they may
13025 well not be faster than doing the ABS and one comparison.
13026 But ABS (X) <= C is a range comparison, which becomes a subtraction
13027 and a comparison, and is probably faster. */
13028 if (code == LE_EXPR
13029 && TREE_CODE (arg1) == INTEGER_CST
13030 && TREE_CODE (arg0) == ABS_EXPR
13031 && ! TREE_SIDE_EFFECTS (arg0)
13032 && (0 != (tem = negate_expr (arg1)))
13033 && TREE_CODE (tem) == INTEGER_CST
13034 && !TREE_OVERFLOW (tem))
13035 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13036 build2 (GE_EXPR, type,
13037 TREE_OPERAND (arg0, 0), tem),
13038 build2 (LE_EXPR, type,
13039 TREE_OPERAND (arg0, 0), arg1));
13041 /* Convert ABS_EXPR<x> >= 0 to true. */
13042 strict_overflow_p = false;
13043 if (code == GE_EXPR
13044 && (integer_zerop (arg1)
13045 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13046 && real_zerop (arg1)))
13047 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13049 if (strict_overflow_p)
13050 fold_overflow_warning (("assuming signed overflow does not occur "
13051 "when simplifying comparison of "
13052 "absolute value and zero"),
13053 WARN_STRICT_OVERFLOW_CONDITIONAL);
13054 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13057 /* Convert ABS_EXPR<x> < 0 to false. */
13058 strict_overflow_p = false;
13059 if (code == LT_EXPR
13060 && (integer_zerop (arg1) || real_zerop (arg1))
13061 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13063 if (strict_overflow_p)
13064 fold_overflow_warning (("assuming signed overflow does not occur "
13065 "when simplifying comparison of "
13066 "absolute value and zero"),
13067 WARN_STRICT_OVERFLOW_CONDITIONAL);
13068 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13071 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13072 and similarly for >= into !=. */
13073 if ((code == LT_EXPR || code == GE_EXPR)
13074 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13075 && TREE_CODE (arg1) == LSHIFT_EXPR
13076 && integer_onep (TREE_OPERAND (arg1, 0)))
13077 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13078 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13079 TREE_OPERAND (arg1, 1)),
13080 build_int_cst (TREE_TYPE (arg0), 0));
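      /* Illustrative example: for unsigned x, x < (1 << y) folds to
	 (x >> y) == 0; x is below the y-th power of two exactly when no
	 bit at position y or above is set.  */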
13082 if ((code == LT_EXPR || code == GE_EXPR)
13083 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13084 && CONVERT_EXPR_P (arg1)
13085 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13086 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13088 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13089 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13090 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13091 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13092 build_int_cst (TREE_TYPE (arg0), 0));
13097 case UNORDERED_EXPR:
13105 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13107 t1 = fold_relational_const (code, type, arg0, arg1);
13108 if (t1 != NULL_TREE)
13112 /* If the first operand is NaN, the result is constant. */
13113 if (TREE_CODE (arg0) == REAL_CST
13114 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13115 && (code != LTGT_EXPR || ! flag_trapping_math))
13117 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13118 ? integer_zero_node
13119 : integer_one_node;
13120 return omit_one_operand_loc (loc, type, t1, arg1);
13123 /* If the second operand is NaN, the result is constant. */
13124 if (TREE_CODE (arg1) == REAL_CST
13125 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13126 && (code != LTGT_EXPR || ! flag_trapping_math))
13128 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13129 ? integer_zero_node
13130 : integer_one_node;
13131 return omit_one_operand_loc (loc, type, t1, arg0);
13134 /* Simplify unordered comparison of something with itself. */
13135 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13136 && operand_equal_p (arg0, arg1, 0))
13137 return constant_boolean_node (1, type);
13139 if (code == LTGT_EXPR
13140 && !flag_trapping_math
13141 && operand_equal_p (arg0, arg1, 0))
13142 return constant_boolean_node (0, type);
13144 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13146 tree targ0 = strip_float_extensions (arg0);
13147 tree targ1 = strip_float_extensions (arg1);
13148 tree newtype = TREE_TYPE (targ0);
13150 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13151 newtype = TREE_TYPE (targ1);
13153 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13154 return fold_build2_loc (loc, code, type,
13155 fold_convert_loc (loc, newtype, targ0),
13156 fold_convert_loc (loc, newtype, targ1));
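      /* Illustrative example: for floats f and g, (double) f < (double) g
	 folds to f < g; widening both operands exactly preserves their
	 values, so the comparison result cannot change.  */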
13161 case COMPOUND_EXPR:
13162 /* When pedantic, a compound expression can be neither an lvalue
13163 nor an integer constant expression. */
13164 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13166 /* Don't let (0, 0) be a null pointer constant. */
13167 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13168 : fold_convert_loc (loc, type, arg1);
13169 return pedantic_non_lvalue_loc (loc, tem);
13172 if ((TREE_CODE (arg0) == REAL_CST
13173 && TREE_CODE (arg1) == REAL_CST)
13174 || (TREE_CODE (arg0) == INTEGER_CST
13175 && TREE_CODE (arg1) == INTEGER_CST))
13176 return build_complex (type, arg0, arg1);
13177 if (TREE_CODE (arg0) == REALPART_EXPR
13178 && TREE_CODE (arg1) == IMAGPART_EXPR
13179 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 0)))
13180 == TYPE_MAIN_VARIANT (type))
13181 && operand_equal_p (TREE_OPERAND (arg0, 0),
13182 TREE_OPERAND (arg1, 0), 0))
13183 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13184 TREE_OPERAND (arg1, 0));
13188 /* An ASSERT_EXPR should never be passed to fold_binary. */
13189 gcc_unreachable ();
13193 } /* switch (code) */
13196 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13197 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
13201 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13203 switch (TREE_CODE (*tp))
13209 *walk_subtrees = 0;
13211 /* ... fall through ... */
13218 /* Return whether the sub-tree ST contains a label which is accessible from
13219 outside the sub-tree. */
13222 contains_label_p (tree st)
13225 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13228 /* Fold a ternary expression of code CODE and type TYPE with operands
13229 OP0, OP1, and OP2. Return the folded expression if folding is
13230 successful. Otherwise, return NULL_TREE. */
13233 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13234 tree op0, tree op1, tree op2)
13237 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13238 enum tree_code_class kind = TREE_CODE_CLASS (code);
13240 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13241 && TREE_CODE_LENGTH (code) == 3);
13243 /* Strip any conversions that don't change the mode. This is safe
13244 for every expression, except for a comparison expression because
13245 its signedness is derived from its operands. So, in the latter
13246 case, only strip conversions that don't change the signedness.
13248 Note that this is done as an internal manipulation within the
13249 constant folder, in order to find the simplest representation of
13250 the arguments so that their form can be studied. In any case,
13251 the appropriate type conversions should be put back in the tree
13252 that will get out of the constant folder. */
13273 case COMPONENT_REF:
13274 if (TREE_CODE (arg0) == CONSTRUCTOR
13275 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13277 unsigned HOST_WIDE_INT idx;
13279 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13286 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13287 so all simple results must be passed through pedantic_non_lvalue. */
13288 if (TREE_CODE (arg0) == INTEGER_CST)
13290 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13291 tem = integer_zerop (arg0) ? op2 : op1;
13292 /* Only optimize constant conditions when the selected branch
13293 has the same type as the COND_EXPR. This avoids optimizing
13294 away "c ? x : throw", where the throw has a void type.
13295 Avoid throwing away an operand that contains a label. */
13296 if ((!TREE_SIDE_EFFECTS (unused_op)
13297 || !contains_label_p (unused_op))
13298 && (! VOID_TYPE_P (TREE_TYPE (tem))
13299 || VOID_TYPE_P (type)))
13300 return pedantic_non_lvalue_loc (loc, tem);
13303 if (operand_equal_p (arg1, op2, 0))
13304 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13306 /* If we have A op B ? A : C, we may be able to convert this to a
13307 simpler expression, depending on the operation and the values
13308 of B and C. Signed zeros prevent all of these transformations,
13309 for reasons given above each one.
13311 Also try swapping the arguments and inverting the conditional. */
13312 if (COMPARISON_CLASS_P (arg0)
13313 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13314 arg1, TREE_OPERAND (arg0, 1))
13315 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13317 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13322 if (COMPARISON_CLASS_P (arg0)
13323 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13325 TREE_OPERAND (arg0, 1))
13326 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13328 location_t loc0 = expr_location_or (arg0, loc);
13329 tem = fold_truth_not_expr (loc0, arg0);
13330 if (tem && COMPARISON_CLASS_P (tem))
13332 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13338 /* If the second operand is simpler than the third, swap them
13339 since that produces better jump optimization results. */
13340 if (truth_value_p (TREE_CODE (arg0))
13341 && tree_swap_operands_p (op1, op2, false))
13343 location_t loc0 = expr_location_or (arg0, loc);
13344 /* See if this can be inverted. If it can't, possibly because
13345 it was a floating-point inequality comparison, don't do anything. */
13347 tem = fold_truth_not_expr (loc0, arg0);
13349 return fold_build3_loc (loc, code, type, tem, op2, op1);
13352 /* Convert A ? 1 : 0 to simply A. */
13353 if (integer_onep (op1)
13354 && integer_zerop (op2)
13355 /* If we try to convert OP0 to our type, the
13356 call to fold will try to move the conversion inside
13357 a COND, which will recurse. In that case, the COND_EXPR
13358 is probably the best choice, so leave it alone. */
13359 && type == TREE_TYPE (arg0))
13360 return pedantic_non_lvalue_loc (loc, arg0);
13362 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13363 over COND_EXPR in cases such as floating point comparisons. */
13364 if (integer_zerop (op1)
13365 && integer_onep (op2)
13366 && truth_value_p (TREE_CODE (arg0)))
13367 return pedantic_non_lvalue_loc (loc,
13368 fold_convert_loc (loc, type,
13369 invert_truthvalue_loc (loc,
13372 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13373 if (TREE_CODE (arg0) == LT_EXPR
13374 && integer_zerop (TREE_OPERAND (arg0, 1))
13375 && integer_zerop (op2)
13376 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13378 /* sign_bit_p only checks ARG1 bits within A's precision.
13379 If <sign bit of A> has wider type than A, bits outside
13380 of A's precision in <sign bit of A> need to be checked.
13381 If they are all 0, this optimization needs to be done
13382 in unsigned A's type; if they are all 1, in signed A's type;
13383 otherwise this can't be done. */
13384 if (TYPE_PRECISION (TREE_TYPE (tem))
13385 < TYPE_PRECISION (TREE_TYPE (arg1))
13386 && TYPE_PRECISION (TREE_TYPE (tem))
13387 < TYPE_PRECISION (type))
13389 unsigned HOST_WIDE_INT mask_lo;
13390 HOST_WIDE_INT mask_hi;
13391 int inner_width, outer_width;
13394 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13395 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13396 if (outer_width > TYPE_PRECISION (type))
13397 outer_width = TYPE_PRECISION (type);
13399 if (outer_width > HOST_BITS_PER_WIDE_INT)
13401 mask_hi = ((unsigned HOST_WIDE_INT) -1
13402 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13408 mask_lo = ((unsigned HOST_WIDE_INT) -1
13409 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13411 if (inner_width > HOST_BITS_PER_WIDE_INT)
13413 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13414 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13418 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13419 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13421 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13422 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13424 tem_type = signed_type_for (TREE_TYPE (tem));
13425 tem = fold_convert_loc (loc, tem_type, tem);
13427 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13428 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13430 tem_type = unsigned_type_for (TREE_TYPE (tem));
13431 tem = fold_convert_loc (loc, tem_type, tem);
13439 fold_convert_loc (loc, type,
13440 fold_build2_loc (loc, BIT_AND_EXPR,
13441 TREE_TYPE (tem), tem,
13442 fold_convert_loc (loc,
13447 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13448 already handled above. */
13449 if (TREE_CODE (arg0) == BIT_AND_EXPR
13450 && integer_onep (TREE_OPERAND (arg0, 1))
13451 && integer_zerop (op2)
13452 && integer_pow2p (arg1))
13454 tree tem = TREE_OPERAND (arg0, 0);
13456 if (TREE_CODE (tem) == RSHIFT_EXPR
13457 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13458 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13459 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13460 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13461 TREE_OPERAND (tem, 0), arg1);
13464 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13465 is probably obsolete because the first operand should be a
13466 truth value (that's why we have the two cases above), but let's
13467 leave it in until we can confirm this for all front-ends. */
13468 if (integer_zerop (op2)
13469 && TREE_CODE (arg0) == NE_EXPR
13470 && integer_zerop (TREE_OPERAND (arg0, 1))
13471 && integer_pow2p (arg1)
13472 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13473 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13474 arg1, OEP_ONLY_CONST))
13475 return pedantic_non_lvalue_loc (loc,
13476 fold_convert_loc (loc, type,
13477 TREE_OPERAND (arg0, 0)));
13479 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13480 if (integer_zerop (op2)
13481 && truth_value_p (TREE_CODE (arg0))
13482 && truth_value_p (TREE_CODE (arg1)))
13483 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13484 fold_convert_loc (loc, type, arg0),
13487 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13488 if (integer_onep (op2)
13489 && truth_value_p (TREE_CODE (arg0))
13490 && truth_value_p (TREE_CODE (arg1)))
13492 location_t loc0 = expr_location_or (arg0, loc);
13493 /* Only perform transformation if ARG0 is easily inverted. */
13494 tem = fold_truth_not_expr (loc0, arg0);
13496 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13497 fold_convert_loc (loc, type, tem),
13501 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13502 if (integer_zerop (arg1)
13503 && truth_value_p (TREE_CODE (arg0))
13504 && truth_value_p (TREE_CODE (op2)))
13506 location_t loc0 = expr_location_or (arg0, loc);
13507 /* Only perform transformation if ARG0 is easily inverted. */
13508 tem = fold_truth_not_expr (loc0, arg0);
13510 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13511 fold_convert_loc (loc, type, tem),
13515 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13516 if (integer_onep (arg1)
13517 && truth_value_p (TREE_CODE (arg0))
13518 && truth_value_p (TREE_CODE (op2)))
13519 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13520 fold_convert_loc (loc, type, arg0),
13526 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13527 of fold_ternary on them. */
13528 gcc_unreachable ();
13530 case BIT_FIELD_REF:
13531 if ((TREE_CODE (arg0) == VECTOR_CST
13532 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13533 && type == TREE_TYPE (TREE_TYPE (arg0)))
13535 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13536 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13539 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13540 && (idx % width) == 0
13541 && (idx = idx / width)
13542 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13544 tree elements = NULL_TREE;
13546 if (TREE_CODE (arg0) == VECTOR_CST)
13547 elements = TREE_VECTOR_CST_ELTS (arg0);
13550 unsigned HOST_WIDE_INT idx;
13553 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13554 elements = tree_cons (NULL_TREE, value, elements);
13556 while (idx-- > 0 && elements)
13557 elements = TREE_CHAIN (elements);
13559 return TREE_VALUE (elements);
13561 return build_zero_cst (type);
13565 /* A bit-field-ref that referenced the full argument can be stripped. */
13566 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13567 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13568 && integer_zerop (op2))
13569 return fold_convert_loc (loc, type, arg0);
13574 /* For integers we can decompose the FMA if possible. */
13575 if (TREE_CODE (arg0) == INTEGER_CST
13576 && TREE_CODE (arg1) == INTEGER_CST)
13577 return fold_build2_loc (loc, PLUS_EXPR, type,
13578 const_binop (MULT_EXPR, arg0, arg1), arg2);
13579 if (integer_zerop (arg2))
13580 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13582 return fold_fma (loc, type, arg0, arg1, arg2);
13586 } /* switch (code) */
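
/* Illustrative sketch (hypothetical helper, not part of GCC): the
   COND_EXPR cases above are normally reached through the fold_buildN
   entry points defined later in this file.  Assuming COND is a truth
   value whose type equals TYPE, the "A ? 1 : 0 => A" case lets the
   built tree collapse to COND itself.  */

static tree
example_fold_cond_to_truth_value (location_t loc, tree cond, tree type)
{
  tree one = build_int_cst (type, 1);
  tree zero = build_int_cst (type, 0);
  /* Folds COND ? 1 : 0; when TYPE == TREE_TYPE (COND) the result is
     simply COND.  */
  return fold_build3_loc (loc, COND_EXPR, type, cond, one, zero);
}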
/* Perform constant folding and related simplification of EXPR.
The related simplifications include x*1 => x, x*0 => 0, etc.,
and application of the associative law.
NOP_EXPR conversions may be removed freely (as long as we
are careful not to change the type of the overall expression).
We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
but we can constant-fold them if they have constant operands. */
#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
const tree t = expr;
enum tree_code code = TREE_CODE (t);
enum tree_code_class kind = TREE_CODE_CLASS (code);
location_t loc = EXPR_LOCATION (expr);
/* Return right away if a constant. */
if (kind == tcc_constant)
/* CALL_EXPR-like objects with variable numbers of operands are
treated specially. */
if (kind == tcc_vl_exp)
if (code == CALL_EXPR)
tem = fold_call_expr (loc, expr, false);
return tem ? tem : expr;
if (IS_EXPR_CODE_CLASS (kind))
tree type = TREE_TYPE (t);
tree op0, op1, op2;
switch (TREE_CODE_LENGTH (code))
op0 = TREE_OPERAND (t, 0);
tem = fold_unary_loc (loc, code, type, op0);
return tem ? tem : expr;
op0 = TREE_OPERAND (t, 0);
op1 = TREE_OPERAND (t, 1);
tem = fold_binary_loc (loc, code, type, op0, op1);
return tem ? tem : expr;
op0 = TREE_OPERAND (t, 0);
op1 = TREE_OPERAND (t, 1);
op2 = TREE_OPERAND (t, 2);
tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
return tem ? tem : expr;
tree op0 = TREE_OPERAND (t, 0);
tree op1 = TREE_OPERAND (t, 1);
if (TREE_CODE (op1) == INTEGER_CST
&& TREE_CODE (op0) == CONSTRUCTOR
&& ! type_contains_placeholder_p (TREE_TYPE (op0)))
VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
unsigned HOST_WIDE_INT begin = 0;
/* Find a matching index by means of a binary search. */
while (begin != end)
unsigned HOST_WIDE_INT middle = (begin + end) / 2;
tree index = VEC_index (constructor_elt, elts, middle)->index;
if (TREE_CODE (index) == INTEGER_CST
&& tree_int_cst_lt (index, op1))
begin = middle + 1;
else if (TREE_CODE (index) == INTEGER_CST
&& tree_int_cst_lt (op1, index))
else if (TREE_CODE (index) == RANGE_EXPR
&& tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
begin = middle + 1;
else if (TREE_CODE (index) == RANGE_EXPR
&& tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
return VEC_index (constructor_elt, elts, middle)->value;
return fold (DECL_INITIAL (t));
} /* switch (code) */
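
/* Illustrative sketch (hypothetical helper, not part of GCC): a
   typical use of fold is to build a tree first and then ask for
   simplification.  Here x * 1 folds back to x, one of the
   simplifications named in the comment above fold.  */

static tree
example_fold_mult_by_one (location_t loc, tree x)
{
  tree one = build_int_cst (TREE_TYPE (x), 1);
  tree prod = build2_loc (loc, MULT_EXPR, TREE_TYPE (x), x, one);
  /* fold applies the x*1 => x simplification.  */
  return fold (prod);
}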
#ifdef ENABLE_FOLD_CHECKING
static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);
/* When --enable-checking=fold, compute a digest of expr before
and after the actual fold call, to verify that fold did not
accidentally change the original expr. */
struct md5_ctx ctx;
unsigned char checksum_before[16], checksum_after[16];
ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
md5_init_ctx (&ctx);
fold_checksum_tree (expr, &ctx, ht);
md5_finish_ctx (&ctx, checksum_before);
ret = fold_1 (expr);
md5_init_ctx (&ctx);
fold_checksum_tree (expr, &ctx, ht);
md5_finish_ctx (&ctx, checksum_after);
if (memcmp (checksum_before, checksum_after, 16))
fold_check_failed (expr, ret);
print_fold_checksum (const_tree expr)
struct md5_ctx ctx;
unsigned char checksum[16], cnt;
ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
md5_init_ctx (&ctx);
fold_checksum_tree (expr, &ctx, ht);
md5_finish_ctx (&ctx, checksum);
for (cnt = 0; cnt < 16; ++cnt)
fprintf (stderr, "%02x", checksum[cnt]);
putc ('\n', stderr);
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
internal_error ("fold check: original tree changed by fold");
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
enum tree_code code;
union tree_node buf;
gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
<= sizeof (struct tree_function_decl))
&& sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
slot = (void **) htab_find_slot (ht, expr, INSERT);
*slot = CONST_CAST_TREE (expr);
code = TREE_CODE (expr);
if (TREE_CODE_CLASS (code) == tcc_declaration
&& DECL_ASSEMBLER_NAME_SET_P (expr))
/* Allow DECL_ASSEMBLER_NAME to be modified. */
memcpy ((char *) &buf, expr, tree_size (expr));
SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
expr = (tree) &buf;
else if (TREE_CODE_CLASS (code) == tcc_type
&& (TYPE_POINTER_TO (expr)
|| TYPE_REFERENCE_TO (expr)
|| TYPE_CACHED_VALUES_P (expr)
|| TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
|| TYPE_NEXT_VARIANT (expr)))
/* Allow these fields to be modified. */
memcpy ((char *) &buf, expr, tree_size (expr));
expr = tmp = (tree) &buf;
TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
TYPE_POINTER_TO (tmp) = NULL;
TYPE_REFERENCE_TO (tmp) = NULL;
TYPE_NEXT_VARIANT (tmp) = NULL;
if (TYPE_CACHED_VALUES_P (tmp))
TYPE_CACHED_VALUES_P (tmp) = 0;
TYPE_CACHED_VALUES (tmp) = NULL;
md5_process_bytes (expr, tree_size (expr), ctx);
fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
if (TREE_CODE_CLASS (code) != tcc_type
&& TREE_CODE_CLASS (code) != tcc_declaration
&& code != TREE_LIST
&& code != SSA_NAME
&& CODE_CONTAINS_STRUCT (code, TS_COMMON))
fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
switch (TREE_CODE_CLASS (code))
md5_process_bytes (TREE_STRING_POINTER (expr),
TREE_STRING_LENGTH (expr), ctx);
fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
case tcc_exceptional:
fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
expr = TREE_CHAIN (expr);
goto recursive_label;
for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
case tcc_expression:
case tcc_reference:
case tcc_comparison:
case tcc_statement:
len = TREE_OPERAND_LENGTH (expr);
for (i = 0; i < len; ++i)
fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
case tcc_declaration:
fold_checksum_tree (DECL_NAME (expr), ctx, ht);
fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
if (TREE_CODE (expr) == ENUMERAL_TYPE)
fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
if (INTEGRAL_TYPE_P (expr)
|| SCALAR_FLOAT_TYPE_P (expr))
fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
if (TREE_CODE (expr) == RECORD_TYPE
|| TREE_CODE (expr) == UNION_TYPE
|| TREE_CODE (expr) == QUAL_UNION_TYPE)
fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
/* Helper function for outputting the checksum of a tree T. When
debugging with gdb, you can "define mynext" to be "next" followed
by "call debug_fold_checksum (op0)", then just trace down till the
outputs differ. */
DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
unsigned char checksum[16];
struct md5_ctx ctx;
htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
md5_init_ctx (&ctx);
fold_checksum_tree (t, &ctx, ht);
md5_finish_ctx (&ctx, checksum);
for (i = 0; i < 16; i++)
fprintf (stderr, "%d ", checksum[i]);
fprintf (stderr, "\n");
/* Fold a unary tree expression with code CODE of type TYPE with an
operand OP0. LOC is the location of the resulting expression.
Return a folded expression if successful. Otherwise, return a tree
expression with code CODE of type TYPE with an operand OP0. */
fold_build1_stat_loc (location_t loc,
enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
#ifdef ENABLE_FOLD_CHECKING
unsigned char checksum_before[16], checksum_after[16];
struct md5_ctx ctx;
ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
md5_init_ctx (&ctx);
fold_checksum_tree (op0, &ctx, ht);
md5_finish_ctx (&ctx, checksum_before);
tem = fold_unary_loc (loc, code, type, op0);
tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
#ifdef ENABLE_FOLD_CHECKING
md5_init_ctx (&ctx);
fold_checksum_tree (op0, &ctx, ht);
md5_finish_ctx (&ctx, checksum_after);
if (memcmp (checksum_before, checksum_after, 16))
fold_check_failed (op0, tem);
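
/* Illustrative sketch (hypothetical helper, not part of GCC):
   fold_build1_loc is the usual way to build a unary node with
   immediate folding; e.g. a NEGATE_EXPR that simplifies on the spot
   when OP is constant.  */

static tree
example_build_negate (location_t loc, tree op)
{
  return fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (op), op);
}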
/* Fold a binary tree expression with code CODE of type TYPE with
operands OP0 and OP1. LOC is the location of the resulting
expression. Return a folded expression if successful. Otherwise,
return a tree expression with code CODE of type TYPE with operands
OP0 and OP1. */
fold_build2_stat_loc (location_t loc,
enum tree_code code, tree type, tree op0, tree op1
#ifdef ENABLE_FOLD_CHECKING
unsigned char checksum_before_op0[16],
checksum_before_op1[16],
checksum_after_op0[16],
checksum_after_op1[16];
struct md5_ctx ctx;
ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
md5_init_ctx (&ctx);
fold_checksum_tree (op0, &ctx, ht);
md5_finish_ctx (&ctx, checksum_before_op0);
md5_init_ctx (&ctx);
fold_checksum_tree (op1, &ctx, ht);
md5_finish_ctx (&ctx, checksum_before_op1);
tem = fold_binary_loc (loc, code, type, op0, op1);
tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
#ifdef ENABLE_FOLD_CHECKING
md5_init_ctx (&ctx);
fold_checksum_tree (op0, &ctx, ht);
md5_finish_ctx (&ctx, checksum_after_op0);
if (memcmp (checksum_before_op0, checksum_after_op0, 16))
fold_check_failed (op0, tem);
md5_init_ctx (&ctx);
fold_checksum_tree (op1, &ctx, ht);
md5_finish_ctx (&ctx, checksum_after_op1);
if (memcmp (checksum_before_op1, checksum_after_op1, 16))
fold_check_failed (op1, tem);
/* Fold a ternary tree expression with code CODE of type TYPE with
operands OP0, OP1, and OP2. Return a folded expression if
successful. Otherwise, return a tree expression with code CODE of
type TYPE with operands OP0, OP1, and OP2. */
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
tree op0, tree op1, tree op2 MEM_STAT_DECL)
#ifdef ENABLE_FOLD_CHECKING
unsigned char checksum_before_op0[16],
checksum_before_op1[16],
checksum_before_op2[16],
checksum_after_op0[16],
checksum_after_op1[16],
checksum_after_op2[16];
struct md5_ctx ctx;
ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
md5_init_ctx (&ctx);
fold_checksum_tree (op0, &ctx, ht);
md5_finish_ctx (&ctx, checksum_before_op0);
md5_init_ctx (&ctx);
fold_checksum_tree (op1, &ctx, ht);
md5_finish_ctx (&ctx, checksum_before_op1);
md5_init_ctx (&ctx);
fold_checksum_tree (op2, &ctx, ht);
md5_finish_ctx (&ctx, checksum_before_op2);
gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
#ifdef ENABLE_FOLD_CHECKING
md5_init_ctx (&ctx);
fold_checksum_tree (op0, &ctx, ht);
md5_finish_ctx (&ctx, checksum_after_op0);
if (memcmp (checksum_before_op0, checksum_after_op0, 16))
fold_check_failed (op0, tem);
md5_init_ctx (&ctx);
fold_checksum_tree (op1, &ctx, ht);
md5_finish_ctx (&ctx, checksum_after_op1);
if (memcmp (checksum_before_op1, checksum_after_op1, 16))
fold_check_failed (op1, tem);
md5_init_ctx (&ctx);
fold_checksum_tree (op2, &ctx, ht);
md5_finish_ctx (&ctx, checksum_after_op2);
if (memcmp (checksum_before_op2, checksum_after_op2, 16))
fold_check_failed (op2, tem);
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
arguments in ARGARRAY, and a null static chain.
Return a folded expression if successful. Otherwise, return a CALL_EXPR
of type TYPE from the given operands as constructed by build_call_array. */
fold_build_call_array_loc (location_t loc, tree type, tree fn,
int nargs, tree *argarray)
#ifdef ENABLE_FOLD_CHECKING
unsigned char checksum_before_fn[16],
checksum_before_arglist[16],
checksum_after_fn[16],
checksum_after_arglist[16];
struct md5_ctx ctx;
ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
md5_init_ctx (&ctx);
fold_checksum_tree (fn, &ctx, ht);
md5_finish_ctx (&ctx, checksum_before_fn);
md5_init_ctx (&ctx);
for (i = 0; i < nargs; i++)
fold_checksum_tree (argarray[i], &ctx, ht);
md5_finish_ctx (&ctx, checksum_before_arglist);
tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
#ifdef ENABLE_FOLD_CHECKING
md5_init_ctx (&ctx);
fold_checksum_tree (fn, &ctx, ht);
md5_finish_ctx (&ctx, checksum_after_fn);
if (memcmp (checksum_before_fn, checksum_after_fn, 16))
fold_check_failed (fn, tem);
md5_init_ctx (&ctx);
for (i = 0; i < nargs; i++)
fold_checksum_tree (argarray[i], &ctx, ht);
md5_finish_ctx (&ctx, checksum_after_arglist);
if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
fold_check_failed (NULL_TREE, tem);
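
/* Illustrative sketch (hypothetical helper, not part of GCC; FNDECL is
   assumed to be a FUNCTION_DECL such as a builtin): folding a
   one-argument call through the checked entry point above.  */

static tree
example_fold_unary_call (location_t loc, tree fndecl, tree arg)
{
  tree fn = build_fold_addr_expr_loc (loc, fndecl);
  tree args[1];
  args[0] = arg;
  /* The call's type is the callee's return type.  */
  return fold_build_call_array_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
                                    fn, 1, args);
}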
/* Perform constant folding and related simplification of initializer
expression EXPR. These behave identically to "fold_buildN" but ignore
potential run-time traps and exceptions that fold must preserve. */
#define START_FOLD_INIT \
int saved_signaling_nans = flag_signaling_nans;\
int saved_trapping_math = flag_trapping_math;\
int saved_rounding_math = flag_rounding_math;\
int saved_trapv = flag_trapv;\
int saved_folding_initializer = folding_initializer;\
flag_signaling_nans = 0;\
flag_trapping_math = 0;\
flag_rounding_math = 0;\
flag_trapv = 0;\
folding_initializer = 1;
#define END_FOLD_INIT \
flag_signaling_nans = saved_signaling_nans;\
flag_trapping_math = saved_trapping_math;\
flag_rounding_math = saved_rounding_math;\
flag_trapv = saved_trapv;\
folding_initializer = saved_folding_initializer;
fold_build1_initializer_loc (location_t loc, enum tree_code code,
tree type, tree op)
result = fold_build1_loc (loc, code, type, op);
fold_build2_initializer_loc (location_t loc, enum tree_code code,
tree type, tree op0, tree op1)
result = fold_build2_loc (loc, code, type, op0, op1);
fold_build3_initializer_loc (location_t loc, enum tree_code code,
tree type, tree op0, tree op1, tree op2)
result = fold_build3_loc (loc, code, type, op0, op1, op2);
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
int nargs, tree *argarray)
result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
#undef START_FOLD_INIT
#undef END_FOLD_INIT
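
/* Illustrative sketch (hypothetical helper, not part of GCC): when
   folding a static initializer, the _initializer_loc variants above
   temporarily clear the trap-related flags, so simplifications that
   would otherwise have to preserve run-time traps (e.g. under
   -ftrapv) are permitted.  */

static tree
example_fold_initializer_sum (location_t loc, tree type, tree a, tree b)
{
  return fold_build2_initializer_loc (loc, PLUS_EXPR, type, a, b);
}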
/* Determine if the first argument is a multiple of the second argument.
Return 0 if it is not, or if we cannot easily determine that it is.
An example of the sort of thing we care about (at this point; this routine
could surely be made more general, and expanded to do what the *_DIV_EXPR's
fold cases do now) is discovering that
SAVE_EXPR (I) * SAVE_EXPR (J * 8)
is a multiple of
SAVE_EXPR (J * 8)
when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
This code also handles discovering that
SAVE_EXPR (I) * SAVE_EXPR (J * 8)
is a multiple of 8 so we don't have to worry about dealing with a
possible remainder.
Note that we *look* inside a SAVE_EXPR only to determine how it was
calculated; it is not safe for fold to do much of anything else with the
internals of a SAVE_EXPR, since it cannot know when it will be evaluated
at run time. For example, the latter example above *cannot* be implemented
as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
evaluation time of the original SAVE_EXPR is not necessarily the same at
the time the new expression is evaluated. The only optimization of this
sort that would be valid is changing
SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
divided by 8 to
SAVE_EXPR (I) * SAVE_EXPR (J)
(where the same SAVE_EXPR (J) is used in the original and the
transformed version). */
multiple_of_p (tree type, const_tree top, const_tree bottom)
if (operand_equal_p (top, bottom, 0))
if (TREE_CODE (type) != INTEGER_TYPE)
switch (TREE_CODE (top))
/* Bitwise and provides a power of two multiple. If the mask is
a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
if (!integer_pow2p (bottom))
return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
|| multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
&& multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
op1 = TREE_OPERAND (top, 1);
/* const_binop may not detect overflow correctly,
so check for it explicitly here. */
if (TYPE_PRECISION (TREE_TYPE (size_one_node))
> TREE_INT_CST_LOW (op1)
&& TREE_INT_CST_HIGH (op1) == 0
&& 0 != (t1 = fold_convert (type,
const_binop (LSHIFT_EXPR,
&& !TREE_OVERFLOW (t1))
return multiple_of_p (type, t1, bottom);
/* Can't handle conversions from non-integral or wider integral type. */
if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
|| (TYPE_PRECISION (type)
< TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
/* .. fall through ... */
return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
&& multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
if (TREE_CODE (bottom) != INTEGER_CST
|| integer_zerop (bottom)
|| (TYPE_UNSIGNED (type)
&& (tree_int_cst_sgn (top) < 0
|| tree_int_cst_sgn (bottom) < 0)))
return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
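
/* Illustrative sketch (hypothetical helper, not part of GCC):
   multiple_of_p answers the kind of question described in the comment
   above, e.g. whether T is provably a multiple of 8 in its own
   integer type.  */

static bool
example_is_multiple_of_8 (tree t)
{
  tree eight = build_int_cst (TREE_TYPE (t), 8);
  return multiple_of_p (TREE_TYPE (t), t, eight) != 0;
}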
/* Return true if CODE or TYPE is known to be non-negative. */
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
&& truth_value_p (code))
/* Truth values evaluate to 0 or 1, which is nonnegative unless we
have a signed:1 type (where the value is -1 and 0). */
/* Return true if (CODE OP0) is known to be non-negative. If the return
value is based on the assumption that signed overflow is undefined,
set *STRICT_OVERFLOW_P to true; otherwise, don't change
*STRICT_OVERFLOW_P. */
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
bool *strict_overflow_p)
if (TYPE_UNSIGNED (type))
/* We can't return 1 if flag_wrapv is set because
ABS_EXPR<INT_MIN> = INT_MIN. */
if (!INTEGRAL_TYPE_P (type))
if (TYPE_OVERFLOW_UNDEFINED (type))
*strict_overflow_p = true;
case NON_LVALUE_EXPR:
case FIX_TRUNC_EXPR:
return tree_expr_nonnegative_warnv_p (op0,
strict_overflow_p);
tree inner_type = TREE_TYPE (op0);
tree outer_type = type;
if (TREE_CODE (outer_type) == REAL_TYPE)
if (TREE_CODE (inner_type) == REAL_TYPE)
return tree_expr_nonnegative_warnv_p (op0,
strict_overflow_p);
if (TREE_CODE (inner_type) == INTEGER_TYPE)
if (TYPE_UNSIGNED (inner_type))
return tree_expr_nonnegative_warnv_p (op0,
strict_overflow_p);
else if (TREE_CODE (outer_type) == INTEGER_TYPE)
if (TREE_CODE (inner_type) == REAL_TYPE)
return tree_expr_nonnegative_warnv_p (op0,
strict_overflow_p);
if (TREE_CODE (inner_type) == INTEGER_TYPE)
return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
&& TYPE_UNSIGNED (inner_type);
return tree_simple_nonnegative_warnv_p (code, type);
/* We don't know the sign of `t', so be conservative and return false. */
/* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
value is based on the assumption that signed overflow is undefined,
set *STRICT_OVERFLOW_P to true; otherwise, don't change
*STRICT_OVERFLOW_P. */
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
tree op1, bool *strict_overflow_p)
if (TYPE_UNSIGNED (type))
case POINTER_PLUS_EXPR:
if (FLOAT_TYPE_P (type))
return (tree_expr_nonnegative_warnv_p (op0,
&& tree_expr_nonnegative_warnv_p (op1,
strict_overflow_p));
/* zero_extend(x) + zero_extend(y) is non-negative if x and y are
both unsigned and at least 2 bits shorter than the result. */
if (TREE_CODE (type) == INTEGER_TYPE
&& TREE_CODE (op0) == NOP_EXPR
&& TREE_CODE (op1) == NOP_EXPR)
tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
&& TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
unsigned int prec = MAX (TYPE_PRECISION (inner1),
TYPE_PRECISION (inner2)) + 1;
return prec < TYPE_PRECISION (type);
if (FLOAT_TYPE_P (type))
/* x * x for floating point x is always non-negative. */
if (operand_equal_p (op0, op1, 0))
return (tree_expr_nonnegative_warnv_p (op0,
&& tree_expr_nonnegative_warnv_p (op1,
strict_overflow_p));
/* zero_extend(x) * zero_extend(y) is non-negative if x and y are
both unsigned and the total of their bits is less than the result's. */
if (TREE_CODE (type) == INTEGER_TYPE
&& (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
&& (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
? TREE_TYPE (TREE_OPERAND (op0, 0))
tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
? TREE_TYPE (TREE_OPERAND (op1, 0))
bool unsigned0 = TYPE_UNSIGNED (inner0);
bool unsigned1 = TYPE_UNSIGNED (inner1);
if (TREE_CODE (op0) == INTEGER_CST)
unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
if (TREE_CODE (op1) == INTEGER_CST)
unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
&& TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
: TYPE_PRECISION (inner0);
unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
: TYPE_PRECISION (inner1);
return precision0 + precision1 < TYPE_PRECISION (type);
return (tree_expr_nonnegative_warnv_p (op0,
|| tree_expr_nonnegative_warnv_p (op1,
strict_overflow_p));
case TRUNC_DIV_EXPR:
case CEIL_DIV_EXPR:
case FLOOR_DIV_EXPR:
case ROUND_DIV_EXPR:
return (tree_expr_nonnegative_warnv_p (op0,
&& tree_expr_nonnegative_warnv_p (op1,
strict_overflow_p));
case TRUNC_MOD_EXPR:
case CEIL_MOD_EXPR:
case FLOOR_MOD_EXPR:
case ROUND_MOD_EXPR:
return tree_expr_nonnegative_warnv_p (op0,
strict_overflow_p);
return tree_simple_nonnegative_warnv_p (code, type);
/* We don't know the sign of `t', so be conservative and return false. */
/* Return true if T is known to be non-negative. If the return
value is based on the assumption that signed overflow is undefined,
set *STRICT_OVERFLOW_P to true; otherwise, don't change
*STRICT_OVERFLOW_P. */
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
if (TYPE_UNSIGNED (TREE_TYPE (t)))
switch (TREE_CODE (t))
return tree_int_cst_sgn (t) >= 0;
return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
&& tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
strict_overflow_p));
return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
/* We don't know the sign of `t', so be conservative and return false. */
/* Return true if T is known to be non-negative. If the return
value is based on the assumption that signed overflow is undefined,
set *STRICT_OVERFLOW_P to true; otherwise, don't change
*STRICT_OVERFLOW_P. */
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
tree arg0, tree arg1, bool *strict_overflow_p)
if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
switch (DECL_FUNCTION_CODE (fndecl))
CASE_FLT_FN (BUILT_IN_ACOS):
CASE_FLT_FN (BUILT_IN_ACOSH):
CASE_FLT_FN (BUILT_IN_CABS):
CASE_FLT_FN (BUILT_IN_COSH):
CASE_FLT_FN (BUILT_IN_ERFC):
CASE_FLT_FN (BUILT_IN_EXP):
CASE_FLT_FN (BUILT_IN_EXP10):
CASE_FLT_FN (BUILT_IN_EXP2):
CASE_FLT_FN (BUILT_IN_FABS):
CASE_FLT_FN (BUILT_IN_FDIM):
CASE_FLT_FN (BUILT_IN_HYPOT):
CASE_FLT_FN (BUILT_IN_POW10):
CASE_INT_FN (BUILT_IN_FFS):
CASE_INT_FN (BUILT_IN_PARITY):
CASE_INT_FN (BUILT_IN_POPCOUNT):
case BUILT_IN_BSWAP32:
case BUILT_IN_BSWAP64:
CASE_FLT_FN (BUILT_IN_SQRT):
/* sqrt(-0.0) is -0.0. */
if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
return tree_expr_nonnegative_warnv_p (arg0,
strict_overflow_p);
CASE_FLT_FN (BUILT_IN_ASINH):
CASE_FLT_FN (BUILT_IN_ATAN):
CASE_FLT_FN (BUILT_IN_ATANH):
CASE_FLT_FN (BUILT_IN_CBRT):
CASE_FLT_FN (BUILT_IN_CEIL):
CASE_FLT_FN (BUILT_IN_ERF):
CASE_FLT_FN (BUILT_IN_EXPM1):
CASE_FLT_FN (BUILT_IN_FLOOR):
CASE_FLT_FN (BUILT_IN_FMOD):
CASE_FLT_FN (BUILT_IN_FREXP):
CASE_FLT_FN (BUILT_IN_LCEIL):
CASE_FLT_FN (BUILT_IN_LDEXP):
CASE_FLT_FN (BUILT_IN_LFLOOR):
CASE_FLT_FN (BUILT_IN_LLCEIL):
CASE_FLT_FN (BUILT_IN_LLFLOOR):
CASE_FLT_FN (BUILT_IN_LLRINT):
CASE_FLT_FN (BUILT_IN_LLROUND):
CASE_FLT_FN (BUILT_IN_LRINT):
CASE_FLT_FN (BUILT_IN_LROUND):
CASE_FLT_FN (BUILT_IN_MODF):
CASE_FLT_FN (BUILT_IN_NEARBYINT):
CASE_FLT_FN (BUILT_IN_RINT):
CASE_FLT_FN (BUILT_IN_ROUND):
CASE_FLT_FN (BUILT_IN_SCALB):
CASE_FLT_FN (BUILT_IN_SCALBLN):
CASE_FLT_FN (BUILT_IN_SCALBN):
CASE_FLT_FN (BUILT_IN_SIGNBIT):
CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
CASE_FLT_FN (BUILT_IN_SINH):
CASE_FLT_FN (BUILT_IN_TANH):
CASE_FLT_FN (BUILT_IN_TRUNC):
/* True if the 1st argument is nonnegative. */
return tree_expr_nonnegative_warnv_p (arg0,
strict_overflow_p);
CASE_FLT_FN (BUILT_IN_FMAX):
/* True if the 1st OR 2nd arguments are nonnegative. */
return (tree_expr_nonnegative_warnv_p (arg0,
|| (tree_expr_nonnegative_warnv_p (arg1,
strict_overflow_p)));
CASE_FLT_FN (BUILT_IN_FMIN):
/* True if the 1st AND 2nd arguments are nonnegative. */
return (tree_expr_nonnegative_warnv_p (arg0,
&& (tree_expr_nonnegative_warnv_p (arg1,
strict_overflow_p)));
CASE_FLT_FN (BUILT_IN_COPYSIGN):
/* True if the 2nd argument is nonnegative. */
return tree_expr_nonnegative_warnv_p (arg1,
strict_overflow_p);
CASE_FLT_FN (BUILT_IN_POWI):
/* True if the 1st argument is nonnegative or the second
argument is an even integer. */
if (TREE_CODE (arg1) == INTEGER_CST
&& (TREE_INT_CST_LOW (arg1) & 1) == 0)
return tree_expr_nonnegative_warnv_p (arg0,
strict_overflow_p);
CASE_FLT_FN (BUILT_IN_POW):
/* True if the 1st argument is nonnegative or the second
argument is an even-integer-valued real. */
if (TREE_CODE (arg1) == REAL_CST)
c = TREE_REAL_CST (arg1);
n = real_to_integer (&c);
REAL_VALUE_TYPE cint;
real_from_integer (&cint, VOIDmode, n,
n < 0 ? -1 : 0, 0);
if (real_identical (&c, &cint))
return tree_expr_nonnegative_warnv_p (arg0,
strict_overflow_p);
return tree_simple_nonnegative_warnv_p (CALL_EXPR,
/* Return true if T is known to be non-negative. If the return
value is based on the assumption that signed overflow is undefined,
set *STRICT_OVERFLOW_P to true; otherwise, don't change
*STRICT_OVERFLOW_P. */
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
enum tree_code code = TREE_CODE (t);
if (TYPE_UNSIGNED (TREE_TYPE (t)))
tree temp = TARGET_EXPR_SLOT (t);
t = TARGET_EXPR_INITIAL (t);
/* If the initializer is non-void, then it's a normal expression
that will be assigned to the slot. */
if (!VOID_TYPE_P (t))
return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
/* Otherwise, the initializer sets the slot in some way. One common
way is an assignment statement at the end of the initializer. */
if (TREE_CODE (t) == BIND_EXPR)
t = expr_last (BIND_EXPR_BODY (t));
else if (TREE_CODE (t) == TRY_FINALLY_EXPR
|| TREE_CODE (t) == TRY_CATCH_EXPR)
t = expr_last (TREE_OPERAND (t, 0));
else if (TREE_CODE (t) == STATEMENT_LIST)
if (TREE_CODE (t) == MODIFY_EXPR
&& TREE_OPERAND (t, 0) == temp)
return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
strict_overflow_p);
tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
get_callee_fndecl (t),
strict_overflow_p);
case COMPOUND_EXPR:
return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
strict_overflow_p);
return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
strict_overflow_p);
return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
strict_overflow_p);
return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
/* We don't know the sign of `t', so be conservative and return false. */
/* Return true if T is known to be non-negative. If the return
value is based on the assumption that signed overflow is undefined,
set *STRICT_OVERFLOW_P to true; otherwise, don't change
*STRICT_OVERFLOW_P. */
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
enum tree_code code;
if (t == error_mark_node)
code = TREE_CODE (t);
switch (TREE_CODE_CLASS (code))
case tcc_comparison:
return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
TREE_OPERAND (t, 0),
TREE_OPERAND (t, 1),
strict_overflow_p);
return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
TREE_OPERAND (t, 0),
strict_overflow_p);
case tcc_declaration:
case tcc_reference:
return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
case TRUTH_AND_EXPR:
case TRUTH_OR_EXPR:
case TRUTH_XOR_EXPR:
return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
TREE_OPERAND (t, 0),
TREE_OPERAND (t, 1),
strict_overflow_p);
case TRUTH_NOT_EXPR:
return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
TREE_OPERAND (t, 0),
strict_overflow_p);
case WITH_SIZE_EXPR:
return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
/* Return true if `t' is known to be non-negative. Handle warnings
about undefined signed overflow. */
tree_expr_nonnegative_p (tree t)
bool ret, strict_overflow_p;
strict_overflow_p = false;
ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
if (strict_overflow_p)
fold_overflow_warning (("assuming signed overflow does not occur when "
"determining that expression is always "
WARN_STRICT_OVERFLOW_MISC);
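
/* Illustrative sketch (hypothetical helper, not part of GCC): a
   client can use tree_expr_nonnegative_p to drop a redundant
   ABS_EXPR, since abs of a provably non-negative value is a no-op.  */

static tree
example_simplify_abs (location_t loc, tree arg)
{
  if (tree_expr_nonnegative_p (arg))
    return arg;
  return fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg), arg);
}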
/* Return true when (CODE OP0) is an address and is known to be nonzero.
For floating point we further ensure that T is not denormal.
Similar logic is present in nonzero_address in rtlanal.h.
If the return value is based on the assumption that signed overflow
is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
change *STRICT_OVERFLOW_P. */
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
bool *strict_overflow_p)
return tree_expr_nonzero_warnv_p (op0,
strict_overflow_p);
tree inner_type = TREE_TYPE (op0);
tree outer_type = type;
return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
&& tree_expr_nonzero_warnv_p (op0,
strict_overflow_p));
case NON_LVALUE_EXPR:
return tree_expr_nonzero_warnv_p (op0,
strict_overflow_p);
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
For floating point we further ensure that T is not denormal.
Similar logic is present in nonzero_address in rtlanal.h.
If the return value is based on the assumption that signed overflow
is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
change *STRICT_OVERFLOW_P. */
tree_binary_nonzero_warnv_p (enum tree_code code,
tree op1, bool *strict_overflow_p)
bool sub_strict_overflow_p;
case POINTER_PLUS_EXPR:
if (TYPE_OVERFLOW_UNDEFINED (type))
/* With the presence of negative values it is hard
to say something. */
sub_strict_overflow_p = false;
if (!tree_expr_nonnegative_warnv_p (op0,
&sub_strict_overflow_p)
|| !tree_expr_nonnegative_warnv_p (op1,
&sub_strict_overflow_p))
/* One of the operands must be positive and the other non-negative. */
/* We don't set *STRICT_OVERFLOW_P here: even if this value
overflows, on a twos-complement machine the sum of two
nonnegative numbers can never be zero. */
return (tree_expr_nonzero_warnv_p (op0,
|| tree_expr_nonzero_warnv_p (op1,
strict_overflow_p));
if (TYPE_OVERFLOW_UNDEFINED (type))
if (tree_expr_nonzero_warnv_p (op0,
&& tree_expr_nonzero_warnv_p (op1,
strict_overflow_p))
*strict_overflow_p = true;
sub_strict_overflow_p = false;
if (tree_expr_nonzero_warnv_p (op0,
&sub_strict_overflow_p)
&& tree_expr_nonzero_warnv_p (op1,
&sub_strict_overflow_p))
if (sub_strict_overflow_p)
*strict_overflow_p = true;
sub_strict_overflow_p = false;
if (tree_expr_nonzero_warnv_p (op0,
&sub_strict_overflow_p))
if (sub_strict_overflow_p)
*strict_overflow_p = true;
/* When both operands are nonzero, then MAX must be too. */
if (tree_expr_nonzero_warnv_p (op1,
strict_overflow_p))
/* MAX where operand 0 is positive is positive. */
return tree_expr_nonnegative_warnv_p (op0,
strict_overflow_p);
/* MAX where operand 1 is positive is positive. */
else if (tree_expr_nonzero_warnv_p (op1,
&sub_strict_overflow_p)
&& tree_expr_nonnegative_warnv_p (op1,
&sub_strict_overflow_p))
if (sub_strict_overflow_p)
*strict_overflow_p = true;
return (tree_expr_nonzero_warnv_p (op1,
|| tree_expr_nonzero_warnv_p (op0,
strict_overflow_p));
/* Return true when T is an address and is known to be nonzero.
For floating point we further ensure that T is not denormal.
Similar logic is present in nonzero_address in rtlanal.h.
If the return value is based on the assumption that signed overflow
is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
change *STRICT_OVERFLOW_P. */
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
bool sub_strict_overflow_p;
switch (TREE_CODE (t))
return !integer_zerop (t);
tree base = TREE_OPERAND (t, 0);
if (!DECL_P (base))
base = get_base_address (base);
/* Weak declarations may link to NULL. Other things may also be NULL
so protect with -fdelete-null-pointer-checks; but not variables
allocated on the stack. */
&& (flag_delete_null_pointer_checks
|| (DECL_CONTEXT (base)
&& TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
&& auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
/* Constants are never weak. */
if (CONSTANT_CLASS_P (base))
sub_strict_overflow_p = false;
if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
&sub_strict_overflow_p)
&& tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
&sub_strict_overflow_p))
if (sub_strict_overflow_p)
*strict_overflow_p = true;
/* Return true when T is an address and is known to be nonzero.
For floating point we further ensure that T is not denormal.
Similar logic is present in nonzero_address in rtlanal.h.
If the return value is based on the assumption that signed overflow
is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
change *STRICT_OVERFLOW_P. */
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
tree type = TREE_TYPE (t);
enum tree_code code;
/* Doing something useful for floating point would need more work. */
if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
code = TREE_CODE (t);
switch (TREE_CODE_CLASS (code))
return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
strict_overflow_p);
case tcc_comparison:
return tree_binary_nonzero_warnv_p (code, type,
TREE_OPERAND (t, 0),
TREE_OPERAND (t, 1),
strict_overflow_p);
case tcc_declaration:
case tcc_reference:
return tree_single_nonzero_warnv_p (t, strict_overflow_p);
case TRUTH_NOT_EXPR:
return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
strict_overflow_p);
case TRUTH_AND_EXPR:
case TRUTH_OR_EXPR:
case TRUTH_XOR_EXPR:
return tree_binary_nonzero_warnv_p (code, type,
TREE_OPERAND (t, 0),
TREE_OPERAND (t, 1),
strict_overflow_p);
case WITH_SIZE_EXPR:
return tree_single_nonzero_warnv_p (t, strict_overflow_p);
case COMPOUND_EXPR:
return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
strict_overflow_p);
return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
strict_overflow_p);
return alloca_call_p (t);
/* Return true when T is an address and is known to be nonzero.
Handle warnings about undefined signed overflow. */
tree_expr_nonzero_p (tree t)
bool ret, strict_overflow_p;
strict_overflow_p = false;
ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
if (strict_overflow_p)
fold_overflow_warning (("assuming signed overflow does not occur when "
"determining that expression is always "
WARN_STRICT_OVERFLOW_MISC);
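
/* Illustrative sketch (hypothetical helper, not part of GCC):
   tree_expr_nonzero_p is the natural guard for transformations that
   are only valid for a nonzero operand, e.g. simplifying X / X to 1
   for integral X.  */

static bool
example_can_fold_x_div_x (tree x)
{
  return tree_expr_nonzero_p (x);
}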
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
attempt to fold the expression to a constant without modifying TYPE,
OP0 or OP1.
If the expression could be simplified to a constant, then return
the constant. If the expression would not be simplified to a
constant, then return NULL_TREE. */
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
tree tem = fold_binary (code, type, op0, op1);
return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
/* Given the components of a unary expression CODE, TYPE and OP0,
attempt to fold the expression to a constant without modifying
TYPE or OP0.
If the expression could be simplified to a constant, then return
the constant. If the expression would not be simplified to a
constant, then return NULL_TREE. */
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
tree tem = fold_unary (code, type, op0);
return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
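
/* Illustrative sketch (hypothetical helper, not part of GCC):
   fold_binary_to_constant returns the folded constant or NULL_TREE,
   so a caller can test for full constant folding in one step.  */

static tree
example_constant_sum (tree type, tree op0, tree op1)
{
  /* Non-NULL only if OP0 + OP1 simplifies to a constant.  */
  return fold_binary_to_constant (PLUS_EXPR, type, op0, op1);
}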
/* If EXP represents referencing an element in a constant string
(either via pointer arithmetic or array indexing), return the
tree representing the value accessed, otherwise return NULL. */
fold_read_from_constant_string (tree exp)
if ((TREE_CODE (exp) == INDIRECT_REF
|| TREE_CODE (exp) == ARRAY_REF)
&& TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
tree exp1 = TREE_OPERAND (exp, 0);
location_t loc = EXPR_LOCATION (exp);
if (TREE_CODE (exp) == INDIRECT_REF)
string = string_constant (exp1, &index);
tree low_bound = array_ref_low_bound (exp);
index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
/* Optimize the special case of a zero lower bound.
We convert the low_bound to sizetype to avoid some problems
with constant folding. (E.g. suppose the lower bound is 1,
and its mode is QI. Without the conversion, (ARRAY
+(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
+INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
if (! integer_zerop (low_bound))
index = size_diffop_loc (loc, index,
fold_convert_loc (loc, sizetype, low_bound));
&& TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
&& TREE_CODE (string) == STRING_CST
&& TREE_CODE (index) == INTEGER_CST
&& compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
&& (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
&& (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
return build_int_cst_type (TREE_TYPE (exp),
(TREE_STRING_POINTER (string)
[TREE_INT_CST_LOW (index)]));
/* Return the tree for neg (ARG0) when ARG0 is known to be either
an integer constant, real, or fixed-point constant.
TYPE is the type of the result. */
fold_negate_const (tree arg0, tree type)
tree t = NULL_TREE;
switch (TREE_CODE (arg0))
double_int val = tree_to_double_int (arg0);
int overflow = neg_double (val.low, val.high, &val.low, &val.high);
t = force_fit_type_double (type, val, 1,
(overflow | TREE_OVERFLOW (arg0))
&& !TYPE_UNSIGNED (type));
t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
FIXED_VALUE_TYPE f;
bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
&(TREE_FIXED_CST (arg0)), NULL,
TYPE_SATURATING (type));
t = build_fixed (type, f);
/* Propagate overflow flags. */
if (overflow_p | TREE_OVERFLOW (arg0))
TREE_OVERFLOW (t) = 1;
gcc_unreachable ();
/* Return the tree for abs (ARG0) when ARG0 is known to be either
an integer constant or real constant.
TYPE is the type of the result. */
fold_abs_const (tree arg0, tree type)
tree t = NULL_TREE;
switch (TREE_CODE (arg0))
double_int val = tree_to_double_int (arg0);
/* If the value is unsigned or non-negative, then the absolute value
is the same as the ordinary value. */
if (TYPE_UNSIGNED (type)
|| !double_int_negative_p (val))
/* If the value is negative, then the absolute value is
its negation. */
overflow = neg_double (val.low, val.high, &val.low, &val.high);
t = force_fit_type_double (type, val, -1,
overflow | TREE_OVERFLOW (arg0));
if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
gcc_unreachable ();
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
constant. TYPE is the type of the result. */
fold_not_const (const_tree arg0, tree type)
gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
val = double_int_not (tree_to_double_int (arg0));
return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
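
/* Illustrative sketch (hypothetical helper, not part of GCC): the
   helpers above constant-fold -C, |C| and ~C; e.g. negating an
   INTEGER_CST in its own type.  */

static tree
example_negate_constant (tree cst)
{
  return fold_negate_const (cst, TREE_TYPE (cst));
}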
/* Given CODE, a relational operator, the target type TYPE, and two
constant operands OP0 and OP1, return the result of the
relational operation. If the result is not a compile time
constant, then return NULL_TREE. */
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
int result, invert;
/* From here on, the only cases we handle are when the result is
known to be a constant. */
if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
/* Handle the cases where either operand is a NaN. */
if (real_isnan (c0) || real_isnan (c1))
case UNORDERED_EXPR:
if (flag_trapping_math)
gcc_unreachable ();
return constant_boolean_node (result, type);
return constant_boolean_node (real_compare (code, c0, c1), type);
if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
return constant_boolean_node (fixed_compare (code, c0, c1), type);
/* Handle equality/inequality of complex constants. */
if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
tree rcond = fold_relational_const (code, type,
TREE_REALPART (op0),
TREE_REALPART (op1));
tree icond = fold_relational_const (code, type,
TREE_IMAGPART (op0),
TREE_IMAGPART (op1));
if (code == EQ_EXPR)
return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
else if (code == NE_EXPR)
return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
/* From here on we only handle LT, LE, GT, GE, EQ and NE.
To compute GT, swap the arguments and do LT.
To compute GE, do LT and invert the result.
To compute LE, swap the arguments, do LT and invert the result.
To compute NE, do EQ and invert the result.
Therefore, the code below must handle only EQ and LT. */
if (code == LE_EXPR || code == GT_EXPR)
code = swap_tree_comparison (code);
/* Note that it is safe to invert for real values here because we
have already handled the one case where it matters. */
if (code == NE_EXPR || code == GE_EXPR)
code = invert_tree_comparison (code, false);
/* Compute a result for LT or EQ if args permit;
otherwise return NULL_TREE. */
if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
if (code == EQ_EXPR)
result = tree_int_cst_equal (op0, op1);
else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
result = INT_CST_LT_UNSIGNED (op0, op1);
result = INT_CST_LT (op0, op1);
return constant_boolean_node (result, type);
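
/* Illustrative sketch (hypothetical helper, not part of GCC):
   comparing two constants via fold_relational_const; the result is a
   boolean constant node, or NULL_TREE when the comparison cannot be
   decided at compile time.  */

static bool
example_const_less_than (tree op0, tree op1)
{
  tree res = fold_relational_const (LT_EXPR, boolean_type_node, op0, op1);
  return res != NULL_TREE && integer_onep (res);
}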
15522 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15523 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15527 fold_build_cleanup_point_expr (tree type, tree expr)
15529 /* If the expression does not have side effects then we don't have to wrap
15530 it with a cleanup point expression. */
15531 if (!TREE_SIDE_EFFECTS (expr))
15534 /* If the expression is a return, check to see if the expression inside the
15535 return has no side effects or the right hand side of the modify expression
15536 inside the return. If either don't have side effects set we don't need to
15537 wrap the expression in a cleanup point expression. Note we don't check the
15538 left hand side of the modify because it should always be a return decl. */
15539 if (TREE_CODE (expr) == RETURN_EXPR)
15541 tree op = TREE_OPERAND (expr, 0);
15542 if (!op || !TREE_SIDE_EFFECTS (op))
15544 op = TREE_OPERAND (op, 1);
15545 if (!TREE_SIDE_EFFECTS (op))
15549 return build1 (CLEANUP_POINT_EXPR, type, expr);
15552 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15553 of an indirection through OP0, or NULL_TREE if no simplification is
15557 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15563 subtype = TREE_TYPE (sub);
15564 if (!POINTER_TYPE_P (subtype))
15567 if (TREE_CODE (sub) == ADDR_EXPR)
15569 tree op = TREE_OPERAND (sub, 0);
15570 tree optype = TREE_TYPE (op);
15571 /* *&CONST_DECL -> to the value of the const decl. */
15572 if (TREE_CODE (op) == CONST_DECL)
15573 return DECL_INITIAL (op);
15574 /* *&p => p; make sure to handle *&"str"[cst] here. */
15575 if (type == optype)
15577 tree fop = fold_read_from_constant_string (op);
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_low_cst (op01, 0);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi
                = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              /* Fold only if the accessed element lies within the
                 bounds of the vector.  */
              if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc, BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }
  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}

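/* Worked example (illustrative only): dereferencing the tree
   POINTER_PLUS_EXPR <&VEC, 8> at type int, where VEC is a V4SI vector,
   takes the vector branch above: OP01 is 8, the part width is 32 bits,
   INDEXI is 64, and the result is BIT_FIELD_REF <VEC, 32, 64>, element
   number 2 of the vector.  */
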
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

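/* Usage sketch (illustrative only): for a VAR_DECL X,

     build_fold_indirect_ref_loc (loc, build_fold_addr_expr_loc (loc, x))

   folds the *&X pattern back to X through fold_indirect_ref_1 instead
   of wrapping the ADDR_EXPR in an INDIRECT_REF.  */
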
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}

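/* For example (illustrative only): if the value of x + (y = f ()) is
   ignored, the addition and the read of X contribute nothing, so
   fold_ignored_result reduces the tree to the side-effecting
   assignment y = f ().  */
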
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          double_int val = tree_to_double_int (value);
          bool overflow_p;

          if ((val.low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val.low &= ~(divisor - 1);
          val.low += divisor;
          if (val.low == 0)
            {
              val.high++;
              if (val.high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), val,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

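/* Worked example (illustrative only): with DIVISOR 16, the
   power-of-two branch rewrites a non-constant VALUE as
   (VALUE + 15) & -16; for the constant 24 it clears the low bits and
   adds the divisor directly, (24 & ~15) + 16 == 32.  A value that is
   already a multiple, such as 32, is returned unchanged.  */
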
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

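/* Worked example (illustrative only): with DIVISOR 16, rounding down
   becomes VALUE & -16, so 24 maps to 16 and 32 stays 32.  A
   non-power-of-two divisor such as 12 goes through FLOOR_DIV_EXPR and
   MULT_EXPR instead, mapping 26 to (26 / 12) * 12 == 24.  */
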
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

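/* For example (illustrative only): if EXP is &s.f, where field F lies
   at byte offset 4 within S, the result is &S with *PBITPOS == 32 and
   *POFFSET == NULL_TREE; a plain pointer P comes back unchanged with a
   zero offset.  */
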
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}

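/* For example (illustrative only): for an array A of 4-byte ints,
   ptr_difference_const (&a[3], &a[1], &diff) finds the common core &A,
   sees the constant bit positions 96 and 32, and stores
   (96 - 32) / BITS_PER_UNIT == 8 in *DIFF.  If either index were a
   variable, only one offset would be constant and the function would
   return false.  */
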
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc, COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}

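/* For example (illustrative only): when only the magnitude of the
   result matters, as when folding fabs ((-x) * copysign (y, z)), the
   sign operations are redundant: the NEGATE_EXPR strips to X, the
   copysign call strips to its first argument Y, and the multiplication
   is rebuilt as x * y (unless the target honors sign-dependent
   rounding).  */
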