1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide and size_binop.
32 fold takes a tree as argument and returns a simplified tree.
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'.
41 Note: Since the folders get called on non-gimple code as well as
42 gimple code, we need to handle GIMPLE tuples as well as their
43 corresponding tree equivalents. */
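/* As a small illustration of these entry points: size_int (4) builds an
   INTEGER_CST of type sizetype with value 4, and
   size_binop (PLUS_EXPR, size_int (4), size_int (8)) folds immediately
   to the sizetype constant 12.  */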
47 #include "coretypes.h"
56 #include "diagnostic-core.h"
60 #include "langhooks.h"
63 #include "tree-flow.h"
65 /* Nonzero if we are folding constants inside an initializer; zero otherwise. */
67 int folding_initializer = 0;
69 /* The following constants represent a bit-based encoding of GCC's
70 comparison operators. This encoding simplifies transformations
71 on relational comparison operators, such as AND and OR. */
72 enum comparison_code {
91 static bool negate_mathfn_p (enum built_in_function);
92 static bool negate_expr_p (tree);
93 static tree negate_expr (tree);
94 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
95 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
96 static tree const_binop (enum tree_code, tree, tree);
97 static enum comparison_code comparison_to_compcode (enum tree_code);
98 static enum tree_code compcode_to_comparison (enum comparison_code);
99 static int operand_equal_for_comparison_p (tree, tree, tree);
100 static int twoval_comparison_p (tree, tree *, tree *, int *);
101 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
102 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
103 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
104 static tree make_bit_field_ref (location_t, tree, tree,
105 HOST_WIDE_INT, HOST_WIDE_INT, int);
106 static tree optimize_bit_field_compare (location_t, enum tree_code,
108 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
110 enum machine_mode *, int *, int *,
112 static int all_ones_mask_p (const_tree, int);
113 static tree sign_bit_p (tree, const_tree);
114 static int simple_operand_p (const_tree);
115 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
116 static tree range_predecessor (tree);
117 static tree range_successor (tree);
118 extern tree make_range (tree, int *, tree *, tree *, bool *);
119 extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
121 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
122 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
123 static tree unextend (tree, int, int, tree);
124 static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
125 static tree optimize_minmax_comparison (location_t, enum tree_code,
127 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
128 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
129 static tree fold_binary_op_with_conditional_arg (location_t,
130 enum tree_code, tree,
133 static tree fold_mathfn_compare (location_t,
134 enum built_in_function, enum tree_code,
136 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
137 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
138 static bool reorder_operands_p (const_tree, const_tree);
139 static tree fold_negate_const (tree, tree);
140 static tree fold_not_const (const_tree, tree);
141 static tree fold_relational_const (enum tree_code, tree, tree, tree);
142 static tree fold_convert_const (enum tree_code, tree, tree);
144 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
145 Otherwise, return LOC. */
148 expr_location_or (tree t, location_t loc)
150 location_t tloc = EXPR_LOCATION (t);
151 return tloc != UNKNOWN_LOCATION ? tloc : loc;
154 /* Similar to protected_set_expr_location, but never modify x in place;
155 if the location can and needs to be set, unshare it. */
158 protected_set_expr_location_unshare (tree x, location_t loc)
160 if (CAN_HAVE_LOCATION_P (x)
161 && EXPR_LOCATION (x) != loc
162 && !(TREE_CODE (x) == SAVE_EXPR
163 || TREE_CODE (x) == TARGET_EXPR
164 || TREE_CODE (x) == BIND_EXPR))
167 SET_EXPR_LOCATION (x, loc);
173 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
174 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
175 and SUM1. Then this yields nonzero if overflow occurred during the addition.
178 Overflow occurs if A and B have the same sign, but A and SUM differ in
179 sign. Use `^' to test whether signs differ, and `< 0' to isolate the sign. */
181 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
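/* For instance, with 8-bit quantities for illustration: A = 0x70 and
   B = 0x20 give SUM = 0x90.  A and B agree in sign while A and SUM do
   not, so ~(A ^ B) & (A ^ SUM) has its sign bit set and the macro
   yields nonzero, flagging the overflow.  */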
183 /* If ARG2 divides ARG1 with zero remainder, carries out the division
184 of type CODE and returns the quotient.
185 Otherwise returns NULL_TREE. */
188 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
193 /* The sign of the division is taken from operand two; that
194 does the correct thing for POINTER_PLUS_EXPR, where we want
195 a signed division. */
196 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
197 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
198 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
201 quo = double_int_divmod (tree_to_double_int (arg1),
202 tree_to_double_int (arg2),
205 if (double_int_zero_p (rem))
206 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
211 /* This is nonzero if we should defer warnings about undefined
212 overflow. This facility exists because these warnings are a
213 special case. The code to estimate loop iterations does not want
214 to issue any warnings, since it works with expressions which do not
215 occur in user code. Various bits of cleanup code call fold(), but
216 only use the result if it has certain characteristics (e.g., is a
217 constant); that code only wants to issue a warning if the result is
220 static int fold_deferring_overflow_warnings;
222 /* If a warning about undefined overflow is deferred, this is the
223 warning. Note that this may cause us to turn two warnings into
224 one, but that is fine since it is sufficient to only give one
225 warning per expression. */
227 static const char* fold_deferred_overflow_warning;
229 /* If a warning about undefined overflow is deferred, this is the
230 level at which the warning should be emitted. */
232 static enum warn_strict_overflow_code fold_deferred_overflow_code;
234 /* Start deferring overflow warnings. We could use a stack here to
235 permit nested calls, but at present it is not necessary. */
238 fold_defer_overflow_warnings (void)
240 ++fold_deferring_overflow_warnings;
243 /* Stop deferring overflow warnings. If there is a pending warning,
244 and ISSUE is true, then issue the warning if appropriate. STMT is
245 the statement with which the warning should be associated (used for
246 location information); STMT may be NULL. CODE is the level of the
247 warning--a warn_strict_overflow_code value. This function will use
248 the smaller of CODE and the deferred code when deciding whether to
249 issue the warning. CODE may be zero, meaning always use the deferred code. */
253 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
258 gcc_assert (fold_deferring_overflow_warnings > 0);
259 --fold_deferring_overflow_warnings;
260 if (fold_deferring_overflow_warnings > 0)
262 if (fold_deferred_overflow_warning != NULL
264 && code < (int) fold_deferred_overflow_code)
265 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
269 warnmsg = fold_deferred_overflow_warning;
270 fold_deferred_overflow_warning = NULL;
272 if (!issue || warnmsg == NULL)
275 if (gimple_no_warning_p (stmt))
278 /* Use the smallest code level when deciding to issue the warning. */
280 if (code == 0 || code > (int) fold_deferred_overflow_code)
281 code = fold_deferred_overflow_code;
283 if (!issue_strict_overflow_warning (code))
287 locus = input_location;
289 locus = gimple_location (stmt);
290 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
293 /* Stop deferring overflow warnings, ignoring any deferred warnings. */
297 fold_undefer_and_ignore_overflow_warnings (void)
299 fold_undefer_overflow_warnings (false, NULL, 0);
302 /* Whether we are deferring overflow warnings. */
305 fold_deferring_overflow_warnings_p (void)
307 return fold_deferring_overflow_warnings > 0;
310 /* This is called when we fold something based on the fact that signed
311 overflow is undefined. */
314 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
316 if (fold_deferring_overflow_warnings > 0)
318 if (fold_deferred_overflow_warning == NULL
319 || wc < fold_deferred_overflow_code)
321 fold_deferred_overflow_warning = gmsgid;
322 fold_deferred_overflow_code = wc;
325 else if (issue_strict_overflow_warning (wc))
326 warning (OPT_Wstrict_overflow, gmsgid);
329 /* Return true if the built-in mathematical function specified by CODE
330 is odd, i.e. -f(x) == f(-x). */
333 negate_mathfn_p (enum built_in_function code)
337 CASE_FLT_FN (BUILT_IN_ASIN):
338 CASE_FLT_FN (BUILT_IN_ASINH):
339 CASE_FLT_FN (BUILT_IN_ATAN):
340 CASE_FLT_FN (BUILT_IN_ATANH):
341 CASE_FLT_FN (BUILT_IN_CASIN):
342 CASE_FLT_FN (BUILT_IN_CASINH):
343 CASE_FLT_FN (BUILT_IN_CATAN):
344 CASE_FLT_FN (BUILT_IN_CATANH):
345 CASE_FLT_FN (BUILT_IN_CBRT):
346 CASE_FLT_FN (BUILT_IN_CPROJ):
347 CASE_FLT_FN (BUILT_IN_CSIN):
348 CASE_FLT_FN (BUILT_IN_CSINH):
349 CASE_FLT_FN (BUILT_IN_CTAN):
350 CASE_FLT_FN (BUILT_IN_CTANH):
351 CASE_FLT_FN (BUILT_IN_ERF):
352 CASE_FLT_FN (BUILT_IN_LLROUND):
353 CASE_FLT_FN (BUILT_IN_LROUND):
354 CASE_FLT_FN (BUILT_IN_ROUND):
355 CASE_FLT_FN (BUILT_IN_SIN):
356 CASE_FLT_FN (BUILT_IN_SINH):
357 CASE_FLT_FN (BUILT_IN_TAN):
358 CASE_FLT_FN (BUILT_IN_TANH):
359 CASE_FLT_FN (BUILT_IN_TRUNC):
362 CASE_FLT_FN (BUILT_IN_LLRINT):
363 CASE_FLT_FN (BUILT_IN_LRINT):
364 CASE_FLT_FN (BUILT_IN_NEARBYINT):
365 CASE_FLT_FN (BUILT_IN_RINT):
366 return !flag_rounding_math;
374 /* Check whether we may negate an integer constant T without causing overflow. */
378 may_negate_without_overflow_p (const_tree t)
380 unsigned HOST_WIDE_INT val;
384 gcc_assert (TREE_CODE (t) == INTEGER_CST);
386 type = TREE_TYPE (t);
387 if (TYPE_UNSIGNED (type))
390 prec = TYPE_PRECISION (type);
391 if (prec > HOST_BITS_PER_WIDE_INT)
393 if (TREE_INT_CST_LOW (t) != 0)
395 prec -= HOST_BITS_PER_WIDE_INT;
396 val = TREE_INT_CST_HIGH (t);
399 val = TREE_INT_CST_LOW (t);
400 if (prec < HOST_BITS_PER_WIDE_INT)
401 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
402 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
405 /* Determine whether an expression T can be cheaply negated using
406 the function negate_expr without introducing undefined overflow. */
409 negate_expr_p (tree t)
416 type = TREE_TYPE (t);
419 switch (TREE_CODE (t))
422 if (TYPE_OVERFLOW_WRAPS (type))
425 /* Check that -CST will not overflow type. */
426 return may_negate_without_overflow_p (t);
428 return (INTEGRAL_TYPE_P (type)
429 && TYPE_OVERFLOW_WRAPS (type));
436 /* We want to canonicalize to positive real constants. Pretend
437 that only negative ones can be easily negated. */
438 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
441 return negate_expr_p (TREE_REALPART (t))
442 && negate_expr_p (TREE_IMAGPART (t));
445 return negate_expr_p (TREE_OPERAND (t, 0))
446 && negate_expr_p (TREE_OPERAND (t, 1));
449 return negate_expr_p (TREE_OPERAND (t, 0));
452 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
453 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
455 /* -(A + B) -> (-B) - A. */
456 if (negate_expr_p (TREE_OPERAND (t, 1))
457 && reorder_operands_p (TREE_OPERAND (t, 0),
458 TREE_OPERAND (t, 1)))
460 /* -(A + B) -> (-A) - B. */
461 return negate_expr_p (TREE_OPERAND (t, 0));
464 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
465 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
466 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
467 && reorder_operands_p (TREE_OPERAND (t, 0),
468 TREE_OPERAND (t, 1));
471 if (TYPE_UNSIGNED (TREE_TYPE (t)))
477 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
478 return negate_expr_p (TREE_OPERAND (t, 1))
479 || negate_expr_p (TREE_OPERAND (t, 0));
487 /* In general we can't negate A / B, because if A is INT_MIN and
488 B is 1, we may turn this into INT_MIN / -1 which is undefined
489 and actually traps on some architectures. But if overflow is
490 undefined, we can negate, because - (INT_MIN / 1) is an
492 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
493 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
495 return negate_expr_p (TREE_OPERAND (t, 1))
496 || negate_expr_p (TREE_OPERAND (t, 0));
499 /* Negate -((double)float) as (double)(-float). */
500 if (TREE_CODE (type) == REAL_TYPE)
502 tree tem = strip_float_extensions (t);
504 return negate_expr_p (tem);
509 /* Negate -f(x) as f(-x). */
510 if (negate_mathfn_p (builtin_mathfn_code (t)))
511 return negate_expr_p (CALL_EXPR_ARG (t, 0));
515 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
516 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
518 tree op1 = TREE_OPERAND (t, 1);
519 if (TREE_INT_CST_HIGH (op1) == 0
520 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
521 == TREE_INT_CST_LOW (op1))
532 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
533 simplification is possible.
534 If negate_expr_p would return true for T, NULL_TREE will never be returned. */
538 fold_negate_expr (location_t loc, tree t)
540 tree type = TREE_TYPE (t);
543 switch (TREE_CODE (t))
545 /* Convert - (~A) to A + 1. */
547 if (INTEGRAL_TYPE_P (type))
548 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
549 build_int_cst (type, 1));
553 tem = fold_negate_const (t, type);
554 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
555 || !TYPE_OVERFLOW_TRAPS (type))
560 tem = fold_negate_const (t, type);
561 /* Two's complement FP formats, such as c4x, may overflow. */
562 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
567 tem = fold_negate_const (t, type);
572 tree rpart = negate_expr (TREE_REALPART (t));
573 tree ipart = negate_expr (TREE_IMAGPART (t));
575 if ((TREE_CODE (rpart) == REAL_CST
576 && TREE_CODE (ipart) == REAL_CST)
577 || (TREE_CODE (rpart) == INTEGER_CST
578 && TREE_CODE (ipart) == INTEGER_CST))
579 return build_complex (type, rpart, ipart);
584 if (negate_expr_p (t))
585 return fold_build2_loc (loc, COMPLEX_EXPR, type,
586 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
587 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
591 if (negate_expr_p (t))
592 return fold_build1_loc (loc, CONJ_EXPR, type,
593 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
597 return TREE_OPERAND (t, 0);
600 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
601 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
603 /* -(A + B) -> (-B) - A. */
604 if (negate_expr_p (TREE_OPERAND (t, 1))
605 && reorder_operands_p (TREE_OPERAND (t, 0),
606 TREE_OPERAND (t, 1)))
608 tem = negate_expr (TREE_OPERAND (t, 1));
609 return fold_build2_loc (loc, MINUS_EXPR, type,
610 tem, TREE_OPERAND (t, 0));
613 /* -(A + B) -> (-A) - B. */
614 if (negate_expr_p (TREE_OPERAND (t, 0)))
616 tem = negate_expr (TREE_OPERAND (t, 0));
617 return fold_build2_loc (loc, MINUS_EXPR, type,
618 tem, TREE_OPERAND (t, 1));
624 /* - (A - B) -> B - A */
625 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
626 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
627 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
628 return fold_build2_loc (loc, MINUS_EXPR, type,
629 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
633 if (TYPE_UNSIGNED (type))
639 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
641 tem = TREE_OPERAND (t, 1);
642 if (negate_expr_p (tem))
643 return fold_build2_loc (loc, TREE_CODE (t), type,
644 TREE_OPERAND (t, 0), negate_expr (tem));
645 tem = TREE_OPERAND (t, 0);
646 if (negate_expr_p (tem))
647 return fold_build2_loc (loc, TREE_CODE (t), type,
648 negate_expr (tem), TREE_OPERAND (t, 1));
657 /* In general we can't negate A / B, because if A is INT_MIN and
658 B is 1, we may turn this into INT_MIN / -1 which is undefined
659 and actually traps on some architectures. But if overflow is
660 undefined, we can negate, because - (INT_MIN / 1) is an
662 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
664 const char * const warnmsg = G_("assuming signed overflow does not "
665 "occur when negating a division");
666 tem = TREE_OPERAND (t, 1);
667 if (negate_expr_p (tem))
669 if (INTEGRAL_TYPE_P (type)
670 && (TREE_CODE (tem) != INTEGER_CST
671 || integer_onep (tem)))
672 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
673 return fold_build2_loc (loc, TREE_CODE (t), type,
674 TREE_OPERAND (t, 0), negate_expr (tem));
676 tem = TREE_OPERAND (t, 0);
677 if (negate_expr_p (tem))
679 if (INTEGRAL_TYPE_P (type)
680 && (TREE_CODE (tem) != INTEGER_CST
681 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
682 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
683 return fold_build2_loc (loc, TREE_CODE (t), type,
684 negate_expr (tem), TREE_OPERAND (t, 1));
690 /* Convert -((double)float) into (double)(-float). */
691 if (TREE_CODE (type) == REAL_TYPE)
693 tem = strip_float_extensions (t);
694 if (tem != t && negate_expr_p (tem))
695 return fold_convert_loc (loc, type, negate_expr (tem));
700 /* Negate -f(x) as f(-x). */
701 if (negate_mathfn_p (builtin_mathfn_code (t))
702 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
706 fndecl = get_callee_fndecl (t);
707 arg = negate_expr (CALL_EXPR_ARG (t, 0));
708 return build_call_expr_loc (loc, fndecl, 1, arg);
713 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
714 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
716 tree op1 = TREE_OPERAND (t, 1);
717 if (TREE_INT_CST_HIGH (op1) == 0
718 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
719 == TREE_INT_CST_LOW (op1))
721 tree ntype = TYPE_UNSIGNED (type)
722 ? signed_type_for (type)
723 : unsigned_type_for (type);
724 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
725 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
726 return fold_convert_loc (loc, type, temp);
738 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
739 negated in a simpler way. Also allow for T to be NULL_TREE, in which case return NULL_TREE. */
751 loc = EXPR_LOCATION (t);
752 type = TREE_TYPE (t);
755 tem = fold_negate_expr (loc, t);
757 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
758 return fold_convert_loc (loc, type, tem);
761 /* Split a tree IN into constant, literal and variable parts that could be
762 combined with CODE to make IN. "constant" means an expression with
763 TREE_CONSTANT but that isn't an actual constant. CODE must be a
764 commutative arithmetic operation. Store the constant part into *CONP,
765 the literal in *LITP and return the variable part. If a part isn't
766 present, set it to null. If the tree does not decompose in this way,
767 return the entire tree as the variable part and the other parts as null.
769 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
770 case, we negate an operand that was subtracted, except for a literal,
771 for which we use *MINUS_LITP instead.
773 If NEGATE_P is true, we are negating all of IN, again except a literal
774 for which we use *MINUS_LITP instead.
776 If IN is itself a literal or constant, return it as appropriate.
778 Note that we do not guarantee that any of the three values will be the
779 same type as IN, but they will have the same signedness and mode. */
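/* For example, with CODE == PLUS_EXPR and IN == A + 4, the variable
   part A is returned, *LITP is set to the literal 4, and *CONP and
   *MINUS_LITP are left null (assuming NEGATE_P is zero).  */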
782 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
783 tree *minus_litp, int negate_p)
791 /* Strip any conversions that don't change the machine mode or signedness. */
792 STRIP_SIGN_NOPS (in);
794 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
795 || TREE_CODE (in) == FIXED_CST)
797 else if (TREE_CODE (in) == code
798 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
799 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
800 /* We can associate addition and subtraction together (even
801 though the C standard doesn't say so) for integers because
802 the value is not affected. For reals, the value might be
803 affected, so we can't. */
804 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
805 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
807 tree op0 = TREE_OPERAND (in, 0);
808 tree op1 = TREE_OPERAND (in, 1);
809 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
810 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
812 /* First see if either of the operands is a literal, then a constant. */
813 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
814 || TREE_CODE (op0) == FIXED_CST)
815 *litp = op0, op0 = 0;
816 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
817 || TREE_CODE (op1) == FIXED_CST)
818 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
820 if (op0 != 0 && TREE_CONSTANT (op0))
821 *conp = op0, op0 = 0;
822 else if (op1 != 0 && TREE_CONSTANT (op1))
823 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
825 /* If we haven't dealt with either operand, this is not a case we can
826 decompose. Otherwise, VAR is either of the ones remaining, if any. */
827 if (op0 != 0 && op1 != 0)
832 var = op1, neg_var_p = neg1_p;
834 /* Now do any needed negations. */
836 *minus_litp = *litp, *litp = 0;
838 *conp = negate_expr (*conp);
840 var = negate_expr (var);
842 else if (TREE_CONSTANT (in))
850 *minus_litp = *litp, *litp = 0;
851 else if (*minus_litp)
852 *litp = *minus_litp, *minus_litp = 0;
853 *conp = negate_expr (*conp);
854 var = negate_expr (var);
860 /* Re-associate trees split by the above function. T1 and T2 are
861 either expressions to associate or null. Return the new
862 expression, if any. LOC is the location of the new expression. If
863 we build an operation, do it in TYPE and with CODE. */
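/* For example, re-associating T1 == X - Y with T2 == -Z under
   PLUS_EXPR builds (X - Y) - Z with build2_loc rather than folding the
   sum back together, since folding could recurse indefinitely.  */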
866 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
873 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
874 try to fold this since we will have infinite recursion. But do
875 deal with any NEGATE_EXPRs. */
876 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
877 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
879 if (code == PLUS_EXPR)
881 if (TREE_CODE (t1) == NEGATE_EXPR)
882 return build2_loc (loc, MINUS_EXPR, type,
883 fold_convert_loc (loc, type, t2),
884 fold_convert_loc (loc, type,
885 TREE_OPERAND (t1, 0)));
886 else if (TREE_CODE (t2) == NEGATE_EXPR)
887 return build2_loc (loc, MINUS_EXPR, type,
888 fold_convert_loc (loc, type, t1),
889 fold_convert_loc (loc, type,
890 TREE_OPERAND (t2, 0)));
891 else if (integer_zerop (t2))
892 return fold_convert_loc (loc, type, t1);
894 else if (code == MINUS_EXPR)
896 if (integer_zerop (t2))
897 return fold_convert_loc (loc, type, t1);
900 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
901 fold_convert_loc (loc, type, t2));
904 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
905 fold_convert_loc (loc, type, t2));
908 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
909 for use in int_const_binop, size_binop and size_diffop. */
912 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
914 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
916 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
931 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
932 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
933 && TYPE_MODE (type1) == TYPE_MODE (type2);
937 /* Combine two integer constants ARG1 and ARG2 under operation CODE
938 to produce a new constant. Return NULL_TREE if we don't know how
939 to evaluate CODE at compile-time. */
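/* E.g., for two INTEGER_CSTs 6 and 3 of the same integer type,
   int_const_binop (TRUNC_DIV_EXPR, ARG1, ARG2) yields the constant 2
   with no overflow flag set.  */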
942 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
944 double_int op1, op2, res, tmp;
946 tree type = TREE_TYPE (arg1);
947 bool uns = TYPE_UNSIGNED (type);
949 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
950 bool overflow = false;
952 op1 = tree_to_double_int (arg1);
953 op2 = tree_to_double_int (arg2);
958 res = double_int_ior (op1, op2);
962 res = double_int_xor (op1, op2);
966 res = double_int_and (op1, op2);
970 res = double_int_rshift (op1, double_int_to_shwi (op2),
971 TYPE_PRECISION (type), !uns);
975 /* It's unclear from the C standard whether shifts can overflow.
976 The following code ignores overflow; perhaps a C standard
977 interpretation ruling is needed. */
978 res = double_int_lshift (op1, double_int_to_shwi (op2),
979 TYPE_PRECISION (type), !uns);
983 res = double_int_rrotate (op1, double_int_to_shwi (op2),
984 TYPE_PRECISION (type));
988 res = double_int_lrotate (op1, double_int_to_shwi (op2),
989 TYPE_PRECISION (type));
993 overflow = add_double (op1.low, op1.high, op2.low, op2.high,
994 &res.low, &res.high);
998 neg_double (op2.low, op2.high, &res.low, &res.high);
999 add_double (op1.low, op1.high, res.low, res.high,
1000 &res.low, &res.high);
1001 overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
1005 overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
1006 &res.low, &res.high);
1009 case TRUNC_DIV_EXPR:
1010 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1011 case EXACT_DIV_EXPR:
1012 /* This is a shortcut for a common special case. */
1013 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1014 && !TREE_OVERFLOW (arg1)
1015 && !TREE_OVERFLOW (arg2)
1016 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1018 if (code == CEIL_DIV_EXPR)
1019 op1.low += op2.low - 1;
1021 res.low = op1.low / op2.low, res.high = 0;
1025 /* ... fall through ... */
1027 case ROUND_DIV_EXPR:
1028 if (double_int_zero_p (op2))
1030 if (double_int_one_p (op2))
1035 if (double_int_equal_p (op1, op2)
1036 && ! double_int_zero_p (op1))
1038 res = double_int_one;
1041 overflow = div_and_round_double (code, uns,
1042 op1.low, op1.high, op2.low, op2.high,
1043 &res.low, &res.high,
1044 &tmp.low, &tmp.high);
1047 case TRUNC_MOD_EXPR:
1048 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1049 /* This is a shortcut for a common special case. */
1050 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1051 && !TREE_OVERFLOW (arg1)
1052 && !TREE_OVERFLOW (arg2)
1053 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1055 if (code == CEIL_MOD_EXPR)
1056 op1.low += op2.low - 1;
1057 res.low = op1.low % op2.low, res.high = 0;
1061 /* ... fall through ... */
1063 case ROUND_MOD_EXPR:
1064 if (double_int_zero_p (op2))
1066 overflow = div_and_round_double (code, uns,
1067 op1.low, op1.high, op2.low, op2.high,
1068 &tmp.low, &tmp.high,
1069 &res.low, &res.high);
1073 res = double_int_min (op1, op2, uns);
1077 res = double_int_max (op1, op2, uns);
1084 t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
1085 ((!uns || is_sizetype) && overflow)
1086 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1091 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1092 constant. We assume ARG1 and ARG2 have the same data type, or at least
1093 are the same kind of constant and the same machine mode. Return zero if
1094 combining the constants is not allowed in the current operating mode. */
1097 const_binop (enum tree_code code, tree arg1, tree arg2)
1099 /* Sanity check for the recursive cases. */
1106 if (TREE_CODE (arg1) == INTEGER_CST)
1107 return int_const_binop (code, arg1, arg2);
1109 if (TREE_CODE (arg1) == REAL_CST)
1111 enum machine_mode mode;
1114 REAL_VALUE_TYPE value;
1115 REAL_VALUE_TYPE result;
1119 /* The following codes are handled by real_arithmetic. */
1134 d1 = TREE_REAL_CST (arg1);
1135 d2 = TREE_REAL_CST (arg2);
1137 type = TREE_TYPE (arg1);
1138 mode = TYPE_MODE (type);
1140 /* Don't perform operation if we honor signaling NaNs and
1141 either operand is a NaN. */
1142 if (HONOR_SNANS (mode)
1143 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1146 /* Don't perform operation if it would raise a division
1147 by zero exception. */
1148 if (code == RDIV_EXPR
1149 && REAL_VALUES_EQUAL (d2, dconst0)
1150 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1153 /* If either operand is a NaN, just return it. Otherwise, set up
1154 for floating-point trap; we return an overflow. */
1155 if (REAL_VALUE_ISNAN (d1))
1157 else if (REAL_VALUE_ISNAN (d2))
1160 inexact = real_arithmetic (&value, code, &d1, &d2);
1161 real_convert (&result, mode, &value);
1163 /* Don't constant fold this floating point operation if
1164 the result has overflowed and flag_trapping_math. */
1165 if (flag_trapping_math
1166 && MODE_HAS_INFINITIES (mode)
1167 && REAL_VALUE_ISINF (result)
1168 && !REAL_VALUE_ISINF (d1)
1169 && !REAL_VALUE_ISINF (d2))
1172 /* Don't constant fold this floating point operation if the
1173 result may depend upon the run-time rounding mode and
1174 flag_rounding_math is set, or if GCC's software emulation
1175 is unable to accurately represent the result. */
1176 if ((flag_rounding_math
1177 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1178 && (inexact || !real_identical (&result, &value)))
1181 t = build_real (type, result);
1183 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1187 if (TREE_CODE (arg1) == FIXED_CST)
1189 FIXED_VALUE_TYPE f1;
1190 FIXED_VALUE_TYPE f2;
1191 FIXED_VALUE_TYPE result;
1196 /* The following codes are handled by fixed_arithmetic. */
1202 case TRUNC_DIV_EXPR:
1203 f2 = TREE_FIXED_CST (arg2);
1208 f2.data.high = TREE_INT_CST_HIGH (arg2);
1209 f2.data.low = TREE_INT_CST_LOW (arg2);
1217 f1 = TREE_FIXED_CST (arg1);
1218 type = TREE_TYPE (arg1);
1219 sat_p = TYPE_SATURATING (type);
1220 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1221 t = build_fixed (type, result);
1222 /* Propagate overflow flags. */
1223 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1224 TREE_OVERFLOW (t) = 1;
1228 if (TREE_CODE (arg1) == COMPLEX_CST)
1230 tree type = TREE_TYPE (arg1);
1231 tree r1 = TREE_REALPART (arg1);
1232 tree i1 = TREE_IMAGPART (arg1);
1233 tree r2 = TREE_REALPART (arg2);
1234 tree i2 = TREE_IMAGPART (arg2);
1241 real = const_binop (code, r1, r2);
1242 imag = const_binop (code, i1, i2);
1246 if (COMPLEX_FLOAT_TYPE_P (type))
1247 return do_mpc_arg2 (arg1, arg2, type,
1248 /* do_nonfinite= */ folding_initializer,
1251 real = const_binop (MINUS_EXPR,
1252 const_binop (MULT_EXPR, r1, r2),
1253 const_binop (MULT_EXPR, i1, i2));
1254 imag = const_binop (PLUS_EXPR,
1255 const_binop (MULT_EXPR, r1, i2),
1256 const_binop (MULT_EXPR, i1, r2));
1260 if (COMPLEX_FLOAT_TYPE_P (type))
1261 return do_mpc_arg2 (arg1, arg2, type,
1262 /* do_nonfinite= */ folding_initializer,
1265 case TRUNC_DIV_EXPR:
1267 case FLOOR_DIV_EXPR:
1268 case ROUND_DIV_EXPR:
1269 if (flag_complex_method == 0)
1271 /* Keep this algorithm in sync with
1272 tree-complex.c:expand_complex_div_straight().
1274 Expand complex division to scalars, straightforward algorithm.
1275 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t), where t = br*br + bi*bi. */
1279 = const_binop (PLUS_EXPR,
1280 const_binop (MULT_EXPR, r2, r2),
1281 const_binop (MULT_EXPR, i2, i2));
1283 = const_binop (PLUS_EXPR,
1284 const_binop (MULT_EXPR, r1, r2),
1285 const_binop (MULT_EXPR, i1, i2));
1287 = const_binop (MINUS_EXPR,
1288 const_binop (MULT_EXPR, i1, r2),
1289 const_binop (MULT_EXPR, r1, i2));
1291 real = const_binop (code, t1, magsquared);
1292 imag = const_binop (code, t2, magsquared);
1296 /* Keep this algorithm in sync with
1297 tree-complex.c:expand_complex_div_wide().
1299 Expand complex division to scalars, modified algorithm to minimize
1300 overflow with wide input ranges. */
1301 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1302 fold_abs_const (r2, TREE_TYPE (type)),
1303 fold_abs_const (i2, TREE_TYPE (type)));
1305 if (integer_nonzerop (compare))
1307 /* In the TRUE branch, we compute
1309 div = (br * ratio) + bi;
1310 tr = (ar * ratio) + ai;
1311 ti = (ai * ratio) - ar;
1314 tree ratio = const_binop (code, r2, i2);
1315 tree div = const_binop (PLUS_EXPR, i2,
1316 const_binop (MULT_EXPR, r2, ratio));
1317 real = const_binop (MULT_EXPR, r1, ratio);
1318 real = const_binop (PLUS_EXPR, real, i1);
1319 real = const_binop (code, real, div);
1321 imag = const_binop (MULT_EXPR, i1, ratio);
1322 imag = const_binop (MINUS_EXPR, imag, r1);
1323 imag = const_binop (code, imag, div);
1327 /* In the FALSE branch, we compute
1329 divisor = (d * ratio) + c;
1330 tr = (b * ratio) + a;
1331 ti = b - (a * ratio);
1334 tree ratio = const_binop (code, i2, r2);
1335 tree div = const_binop (PLUS_EXPR, r2,
1336 const_binop (MULT_EXPR, i2, ratio));
1338 real = const_binop (MULT_EXPR, i1, ratio);
1339 real = const_binop (PLUS_EXPR, real, r1);
1340 real = const_binop (code, real, div);
1342 imag = const_binop (MULT_EXPR, r1, ratio);
1343 imag = const_binop (MINUS_EXPR, i1, imag);
1344 imag = const_binop (code, imag, div);
1354 return build_complex (type, real, imag);
1357 if (TREE_CODE (arg1) == VECTOR_CST)
1359 tree type = TREE_TYPE (arg1);
1360 int count = TYPE_VECTOR_SUBPARTS (type), i;
1361 tree elements1, elements2, list = NULL_TREE;
1363 if (TREE_CODE (arg2) != VECTOR_CST)
1366 elements1 = TREE_VECTOR_CST_ELTS (arg1);
1367 elements2 = TREE_VECTOR_CST_ELTS (arg2);
1369 for (i = 0; i < count; i++)
1371 tree elem1, elem2, elem;
1373 /* The trailing elements can be empty and should be treated as 0. */
1375 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1378 elem1 = TREE_VALUE (elements1);
1379 elements1 = TREE_CHAIN (elements1);
1383 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1386 elem2 = TREE_VALUE (elements2);
1387 elements2 = TREE_CHAIN (elements2);
1390 elem = const_binop (code, elem1, elem2);
1392 /* It is possible that const_binop cannot handle the given
1393 code and returns NULL_TREE. */
1394 if (elem == NULL_TREE)
1397 list = tree_cons (NULL_TREE, elem, list);
1399 return build_vector (type, nreverse (list));
1404 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1405 indicates which particular sizetype to create. */
1408 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1410 return build_int_cst (sizetype_tab[(int) kind], number);
1413 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1414 is a tree code. The type of the result is taken from the operands.
1415 Both must be equivalent integer types, ala int_binop_types_match_p.
1416 If the operands are constant, so is the result. */
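/* For instance, size_binop (MULT_EXPR, size_int (4), size_int (8))
   folds at once to the sizetype constant 32; if either operand is not
   constant, the operation is simply handed to fold_build2_loc.  */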
1419 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1421 tree type = TREE_TYPE (arg0);
1423 if (arg0 == error_mark_node || arg1 == error_mark_node)
1424 return error_mark_node;
1426 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1429 /* Handle the special case of two integer constants faster. */
1430 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1432 /* And some specific cases even faster than that. */
1433 if (code == PLUS_EXPR)
1435 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1437 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1440 else if (code == MINUS_EXPR)
1442 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1445 else if (code == MULT_EXPR)
1447 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1451 /* Handle general case of two integer constants. */
1452 return int_const_binop (code, arg0, arg1);
1455 return fold_build2_loc (loc, code, type, arg0, arg1);
1458 /* Given two values, either both of sizetype or both of bitsizetype,
1459 compute the difference between the two values. Return the value
1460 in signed type corresponding to the type of the operands. */
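/* E.g., size_diffop_loc on the sizetype constants 3 and 5 produces
   the ssizetype constant -2: since 5 > 3 we compute 5 - 3 in the
   unsigned type and negate the converted result.  */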
1463 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1465 tree type = TREE_TYPE (arg0);
1468 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1471 /* If the type is already signed, just do the simple thing. */
1472 if (!TYPE_UNSIGNED (type))
1473 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1475 if (type == sizetype)
1477 else if (type == bitsizetype)
1478 ctype = sbitsizetype;
1480 ctype = signed_type_for (type);
1482 /* If either operand is not a constant, do the conversions to the signed
1483 type and subtract. The hardware will do the right thing with any
1484 overflow in the subtraction. */
1485 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1486 return size_binop_loc (loc, MINUS_EXPR,
1487 fold_convert_loc (loc, ctype, arg0),
1488 fold_convert_loc (loc, ctype, arg1));
1490 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1491 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1492 overflow) and negate (which can't either). Special-case a result
1493 of zero while we're here. */
1494 if (tree_int_cst_equal (arg0, arg1))
1495 return build_int_cst (ctype, 0);
1496 else if (tree_int_cst_lt (arg1, arg0))
1497 return fold_convert_loc (loc, ctype,
1498 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1500 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1501 fold_convert_loc (loc, ctype,
1502 size_binop_loc (loc,
1507 /* A subroutine of fold_convert_const handling conversions of an
1508 INTEGER_CST to another integer type. */
1511 fold_convert_const_int_from_int (tree type, const_tree arg1)
1515 /* Given an integer constant, make new constant with new type,
1516 appropriately sign-extended or truncated. */
1517 t = force_fit_type_double (type, tree_to_double_int (arg1),
1518 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1519 (TREE_INT_CST_HIGH (arg1) < 0
1520 && (TYPE_UNSIGNED (type)
1521 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1522 | TREE_OVERFLOW (arg1));
1527 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1528 to an integer type. */
1531 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1536 /* The following code implements the floating point to integer
1537 conversion rules required by the Java Language Specification,
1538 that IEEE NaNs are mapped to zero and values that overflow
1539 the target precision saturate, i.e. values greater than
1540 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1541 are mapped to INT_MIN. These semantics are allowed by the
1542 C and C++ standards that simply state that the behavior of
1543 FP-to-integer conversion is unspecified upon overflow. */
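/* Under these rules, converting the REAL_CST 1e30 to a 32-bit signed
   integer type saturates to the type's maximum value 2147483647, and
   the resulting constant carries TREE_OVERFLOW.  */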
1547 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1551 case FIX_TRUNC_EXPR:
1552 real_trunc (&r, VOIDmode, &x);
1559 /* If R is NaN, return zero and show we have an overflow. */
1560 if (REAL_VALUE_ISNAN (r))
1563 val = double_int_zero;
1566 /* See if R is less than the lower bound or greater than the
1571 tree lt = TYPE_MIN_VALUE (type);
1572 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1573 if (REAL_VALUES_LESS (r, l))
1576 val = tree_to_double_int (lt);
1582 tree ut = TYPE_MAX_VALUE (type);
1585 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1586 if (REAL_VALUES_LESS (u, r))
1589 val = tree_to_double_int (ut);
1595 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1597 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1601 /* A subroutine of fold_convert_const handling conversions of a
1602 FIXED_CST to an integer type. */
1605 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1608 double_int temp, temp_trunc;
1611 /* Right shift FIXED_CST to temp by fbit. */
1612 temp = TREE_FIXED_CST (arg1).data;
1613 mode = TREE_FIXED_CST (arg1).mode;
1614 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
1616 temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
1617 HOST_BITS_PER_DOUBLE_INT,
1618 SIGNED_FIXED_POINT_MODE_P (mode));
1620 /* Left shift temp to temp_trunc by fbit. */
1621 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
1622 HOST_BITS_PER_DOUBLE_INT,
1623 SIGNED_FIXED_POINT_MODE_P (mode));
1627 temp = double_int_zero;
1628 temp_trunc = double_int_zero;
1631 /* If FIXED_CST is negative, we need to round the value toward 0.
1632 We do this by adding 1 to temp when the fractional bits are not zero. */
1633 if (SIGNED_FIXED_POINT_MODE_P (mode)
1634 && double_int_negative_p (temp_trunc)
1635 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
1636 temp = double_int_add (temp, double_int_one);
1638 /* Given a fixed-point constant, make new constant with new type,
1639 appropriately sign-extended or truncated. */
1640 t = force_fit_type_double (type, temp, -1,
1641 (double_int_negative_p (temp)
1642 && (TYPE_UNSIGNED (type)
1643 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1644 | TREE_OVERFLOW (arg1));
1649 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1650 to another floating point type. */
1653 fold_convert_const_real_from_real (tree type, const_tree arg1)
1655 REAL_VALUE_TYPE value;
1658 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1659 t = build_real (type, value);
1661 /* If converting an infinity or NAN to a representation that doesn't
1662 have one, set the overflow bit so that we can produce some kind of
1663 error message at the appropriate point if necessary. It's not the
1664 most user-friendly message, but it's better than nothing. */
1665 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1666 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1667 TREE_OVERFLOW (t) = 1;
1668 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1669 && !MODE_HAS_NANS (TYPE_MODE (type)))
1670 TREE_OVERFLOW (t) = 1;
1671 /* Regular overflow: the conversion produced an infinity in a mode that
1672 can't represent infinities. */
1673 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1674 && REAL_VALUE_ISINF (value)
1675 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1676 TREE_OVERFLOW (t) = 1;
1678 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1682 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1683 to a floating point type. */
1686 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1688 REAL_VALUE_TYPE value;
1691 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1692 t = build_real (type, value);
1694 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1698 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1699 to another fixed-point type. */
1702 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1704 FIXED_VALUE_TYPE value;
1708 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1709 TYPE_SATURATING (type));
1710 t = build_fixed (type, value);
1712 /* Propagate overflow flags. */
1713 if (overflow_p | TREE_OVERFLOW (arg1))
1714 TREE_OVERFLOW (t) = 1;
1718 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1719 to a fixed-point type. */
1722 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1724 FIXED_VALUE_TYPE value;
1728 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1729 TREE_INT_CST (arg1),
1730 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1731 TYPE_SATURATING (type));
1732 t = build_fixed (type, value);
1734 /* Propagate overflow flags. */
1735 if (overflow_p | TREE_OVERFLOW (arg1))
1736 TREE_OVERFLOW (t) = 1;
1740 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1741 to a fixed-point type. */
1744 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1746 FIXED_VALUE_TYPE value;
1750 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1751 &TREE_REAL_CST (arg1),
1752 TYPE_SATURATING (type));
1753 t = build_fixed (type, value);
1755 /* Propagate overflow flags. */
1756 if (overflow_p | TREE_OVERFLOW (arg1))
1757 TREE_OVERFLOW (t) = 1;
1761 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1762 type TYPE. If no simplification can be done return NULL_TREE. */
1765 fold_convert_const (enum tree_code code, tree type, tree arg1)
1767 if (TREE_TYPE (arg1) == type)
1770 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1771 || TREE_CODE (type) == OFFSET_TYPE)
1773 if (TREE_CODE (arg1) == INTEGER_CST)
1774 return fold_convert_const_int_from_int (type, arg1);
1775 else if (TREE_CODE (arg1) == REAL_CST)
1776 return fold_convert_const_int_from_real (code, type, arg1);
1777 else if (TREE_CODE (arg1) == FIXED_CST)
1778 return fold_convert_const_int_from_fixed (type, arg1);
1780 else if (TREE_CODE (type) == REAL_TYPE)
1782 if (TREE_CODE (arg1) == INTEGER_CST)
1783 return build_real_from_int_cst (type, arg1);
1784 else if (TREE_CODE (arg1) == REAL_CST)
1785 return fold_convert_const_real_from_real (type, arg1);
1786 else if (TREE_CODE (arg1) == FIXED_CST)
1787 return fold_convert_const_real_from_fixed (type, arg1);
1789 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1791 if (TREE_CODE (arg1) == FIXED_CST)
1792 return fold_convert_const_fixed_from_fixed (type, arg1);
1793 else if (TREE_CODE (arg1) == INTEGER_CST)
1794 return fold_convert_const_fixed_from_int (type, arg1);
1795 else if (TREE_CODE (arg1) == REAL_CST)
1796 return fold_convert_const_fixed_from_real (type, arg1);
1801 /* Construct a vector of zero elements of vector type TYPE. */
1804 build_zero_vector (tree type)
1808 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1809 return build_vector_from_val (type, t);
1812 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
1815 fold_convertible_p (const_tree type, const_tree arg)
1817 tree orig = TREE_TYPE (arg);
1822 if (TREE_CODE (arg) == ERROR_MARK
1823 || TREE_CODE (type) == ERROR_MARK
1824 || TREE_CODE (orig) == ERROR_MARK)
1827 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1830 switch (TREE_CODE (type))
1832 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1833 case POINTER_TYPE: case REFERENCE_TYPE:
1835 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1836 || TREE_CODE (orig) == OFFSET_TYPE)
1838 return (TREE_CODE (orig) == VECTOR_TYPE
1839 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1842 case FIXED_POINT_TYPE:
1846 return TREE_CODE (type) == TREE_CODE (orig);
1853 /* Convert expression ARG to type TYPE. Used by the middle-end for
1854 simple conversions in preference to calling the front-end's convert. */
1857 fold_convert_loc (location_t loc, tree type, tree arg)
1859 tree orig = TREE_TYPE (arg);
1865 if (TREE_CODE (arg) == ERROR_MARK
1866 || TREE_CODE (type) == ERROR_MARK
1867 || TREE_CODE (orig) == ERROR_MARK)
1868 return error_mark_node;
1870 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1871 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1873 switch (TREE_CODE (type))
1876 case REFERENCE_TYPE:
1877 /* Handle conversions between pointers to different address spaces. */
1878 if (POINTER_TYPE_P (orig)
1879 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1880 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1881 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1884 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1886 if (TREE_CODE (arg) == INTEGER_CST)
1888 tem = fold_convert_const (NOP_EXPR, type, arg);
1889 if (tem != NULL_TREE)
1892 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1893 || TREE_CODE (orig) == OFFSET_TYPE)
1894 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1895 if (TREE_CODE (orig) == COMPLEX_TYPE)
1896 return fold_convert_loc (loc, type,
1897 fold_build1_loc (loc, REALPART_EXPR,
1898 TREE_TYPE (orig), arg));
1899 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1900 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1901 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1904 if (TREE_CODE (arg) == INTEGER_CST)
1906 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1907 if (tem != NULL_TREE)
1910 else if (TREE_CODE (arg) == REAL_CST)
1912 tem = fold_convert_const (NOP_EXPR, type, arg);
1913 if (tem != NULL_TREE)
1916 else if (TREE_CODE (arg) == FIXED_CST)
1918 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1919 if (tem != NULL_TREE)
1923 switch (TREE_CODE (orig))
1926 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1927 case POINTER_TYPE: case REFERENCE_TYPE:
1928 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1931 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1933 case FIXED_POINT_TYPE:
1934 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1937 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1938 return fold_convert_loc (loc, type, tem);
1944 case FIXED_POINT_TYPE:
1945 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1946 || TREE_CODE (arg) == REAL_CST)
1948 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1949 if (tem != NULL_TREE)
1950 goto fold_convert_exit;
1953 switch (TREE_CODE (orig))
1955 case FIXED_POINT_TYPE:
1960 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1963 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1964 return fold_convert_loc (loc, type, tem);
1971 switch (TREE_CODE (orig))
1974 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1975 case POINTER_TYPE: case REFERENCE_TYPE:
1977 case FIXED_POINT_TYPE:
1978 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1979 fold_convert_loc (loc, TREE_TYPE (type), arg),
1980 fold_convert_loc (loc, TREE_TYPE (type),
1981 integer_zero_node));
1986 if (TREE_CODE (arg) == COMPLEX_EXPR)
1988 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1989 TREE_OPERAND (arg, 0));
1990 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1991 TREE_OPERAND (arg, 1));
1992 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1995 arg = save_expr (arg);
1996 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1997 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1998 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1999 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2000 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2008 if (integer_zerop (arg))
2009 return build_zero_vector (type);
2010 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2011 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2012 || TREE_CODE (orig) == VECTOR_TYPE);
2013 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2016 tem = fold_ignored_result (arg);
2017 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2023 protected_set_expr_location_unshare (tem, loc);
2027 /* Return false if expr can be assumed not to be an lvalue, true otherwise. */
2031 maybe_lvalue_p (const_tree x)
2033 /* We only need to wrap lvalue tree codes. */
2034 switch (TREE_CODE (x))
2047 case ARRAY_RANGE_REF:
2053 case PREINCREMENT_EXPR:
2054 case PREDECREMENT_EXPR:
2056 case TRY_CATCH_EXPR:
2057 case WITH_CLEANUP_EXPR:
2066 /* Assume the worst for front-end tree codes. */
2067 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2075 /* Return an expr equal to X but certainly not valid as an lvalue. */
2078 non_lvalue_loc (location_t loc, tree x)
2080 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2085 if (! maybe_lvalue_p (x))
2087 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2090 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2091 Zero means allow extended lvalues. */
2093 int pedantic_lvalues;
2095 /* When pedantic, return an expr equal to X but certainly not valid as a
2096 pedantic lvalue. Otherwise, return X. */
2099 pedantic_non_lvalue_loc (location_t loc, tree x)
2101 if (pedantic_lvalues)
2102 return non_lvalue_loc (loc, x);
2104 return protected_set_expr_location_unshare (x, loc);
2107 /* Given a tree comparison code, return the code that is the logical inverse
2108 of the given code. It is not safe to do this for floating-point
2109 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2110 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
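/* For example, the inverse of LT_EXPR is GE_EXPR, or UNGE_EXPR when
   NaNs must be honored, since !(x < y) admits the unordered case.  */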
2113 invert_tree_comparison (enum tree_code code, bool honor_nans)
2115 if (honor_nans && flag_trapping_math)
2125 return honor_nans ? UNLE_EXPR : LE_EXPR;
2127 return honor_nans ? UNLT_EXPR : LT_EXPR;
2129 return honor_nans ? UNGE_EXPR : GE_EXPR;
2131 return honor_nans ? UNGT_EXPR : GT_EXPR;
2145 return UNORDERED_EXPR;
2146 case UNORDERED_EXPR:
2147 return ORDERED_EXPR;
2153 /* Similar, but return the comparison that results if the operands are
2154 swapped. This is safe for floating-point. */
2157 swap_tree_comparison (enum tree_code code)
2164 case UNORDERED_EXPR:
2190 /* Convert a comparison tree code from an enum tree_code representation
2191 into a compcode bit-based encoding. This function is the inverse of
2192 compcode_to_comparison. */
2194 static enum comparison_code
2195 comparison_to_compcode (enum tree_code code)
2212 return COMPCODE_ORD;
2213 case UNORDERED_EXPR:
2214 return COMPCODE_UNORD;
2216 return COMPCODE_UNLT;
2218 return COMPCODE_UNEQ;
2220 return COMPCODE_UNLE;
2222 return COMPCODE_UNGT;
2224 return COMPCODE_LTGT;
2226 return COMPCODE_UNGE;
2232 /* Convert a compcode bit-based encoding of a comparison operator back
2233 to GCC's enum tree_code representation. This function is the
2234 inverse of comparison_to_compcode. */
2236 static enum tree_code
2237 compcode_to_comparison (enum comparison_code code)
2254 return ORDERED_EXPR;
2255 case COMPCODE_UNORD:
2256 return UNORDERED_EXPR;
2274 /* Return a tree for the comparison which is the combination of
2275 doing the AND or OR (depending on CODE) of the two operations LCODE
2276 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2277 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2278 if this makes the transformation invalid. */
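/* For example, combining X < Y with X == Y under TRUTH_ORIF_EXPR
   yields the single comparison X <= Y, because the bit-based codes
   for LT and EQ OR together to the code for LE.  */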
2281 combine_comparisons (location_t loc,
2282 enum tree_code code, enum tree_code lcode,
2283 enum tree_code rcode, tree truth_type,
2284 tree ll_arg, tree lr_arg)
2286 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2287 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2288 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2293 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2294 compcode = lcompcode & rcompcode;
2297 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2298 compcode = lcompcode | rcompcode;
2307 /* Eliminate unordered comparisons, as well as LTGT and ORD
2308 which are not used unless the mode has NaNs. */
2309 compcode &= ~COMPCODE_UNORD;
2310 if (compcode == COMPCODE_LTGT)
2311 compcode = COMPCODE_NE;
2312 else if (compcode == COMPCODE_ORD)
2313 compcode = COMPCODE_TRUE;
2315 else if (flag_trapping_math)
2317 /* Check that the original operation and the optimized ones will trap
2318 under the same condition. */
2319 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2320 && (lcompcode != COMPCODE_EQ)
2321 && (lcompcode != COMPCODE_ORD);
2322 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2323 && (rcompcode != COMPCODE_EQ)
2324 && (rcompcode != COMPCODE_ORD);
2325 bool trap = (compcode & COMPCODE_UNORD) == 0
2326 && (compcode != COMPCODE_EQ)
2327 && (compcode != COMPCODE_ORD);
2329 /* In a short-circuited boolean expression the LHS might be
2330 such that the RHS, if evaluated, will never trap. For
2331 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2332 if neither x nor y is NaN. (This is a mixed blessing: for
2333 example, the expression above will never trap, hence
2334 optimizing it to x < y would be invalid). */
2335 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2336 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2339 /* If the comparison was short-circuited, and only the RHS
2340 trapped, we may now generate a spurious trap. */
2342 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2345 /* If we changed the conditions that cause a trap, we lose. */
2346 if ((ltrap || rtrap) != trap)
2350 if (compcode == COMPCODE_TRUE)
2351 return constant_boolean_node (true, truth_type);
2352 else if (compcode == COMPCODE_FALSE)
2353 return constant_boolean_node (false, truth_type);
2356 enum tree_code tcode;
2358 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2359 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
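/* [Illustrative check, not part of fold-const.c.]  For integer operands the
   fold performed by combine_comparisons is a plain logical identity; the
   hypothetical helper below verifies the (a < b) || (a == b) ==> (a <= b)
   case exhaustively over a small range.  With floating point and
   -ftrapping-math the function above must additionally prove that the
   folded comparison traps under exactly the same conditions as the original
   pair, which is what the ltrap/rtrap/trap logic checks.  */
static int
demo_combine_comparisons_check (void)
{
  int a, b;
  for (a = -3; a <= 3; a++)
    for (b = -3; b <= 3; b++)
      if (((a < b) || (a == b)) != (a <= b))
	return 0;
  return 1;	/* always reached */
}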
2363 /* Return nonzero if two operands (typically of the same tree node)
2364 are necessarily equal. If either argument has side-effects this
2365 function returns zero. FLAGS modifies behavior as follows:
2367 If OEP_ONLY_CONST is set, only return nonzero for constants.
2368 This function tests whether the operands are indistinguishable;
2369 it does not test whether they are equal using C's == operation.
2370 The distinction is important for IEEE floating point, because
2371 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2372 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2374 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2375 even though it may hold multiple values during a function.
2376 This is because a GCC tree node guarantees that nothing else is
2377 executed between the evaluation of its "operands" (which may often
2378 be evaluated in arbitrary order). Hence if the operands themselves
2379 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2380 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2381 unset means assuming isochronic (or instantaneous) tree equivalence.
2382 Unless comparing arbitrary expression trees, such as from different
2383 statements, this flag can usually be left unset.
2385 If OEP_PURE_SAME is set, then pure functions with identical arguments
2386 are considered the same. It is used when the caller has other ways
2387 to ensure that global memory is unchanged in between. */
2390 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2392 /* If either is ERROR_MARK, they aren't equal. */
2393 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2394 || TREE_TYPE (arg0) == error_mark_node
2395 || TREE_TYPE (arg1) == error_mark_node)
2398 /* Similar, if either does not have a type (like a released SSA name),
2399 they aren't equal. */
2400 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2403 /* Check equality of integer constants before bailing out due to
2404 precision differences. */
2405 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2406 return tree_int_cst_equal (arg0, arg1);
2408 /* If both types don't have the same signedness, then we can't consider
2409 them equal. We must check this before the STRIP_NOPS calls
2410 because they may change the signedness of the arguments. As pointers
2411 strictly don't have a signedness, require either two pointers or
2412 two non-pointers as well. */
2413 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2414 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2417 /* We cannot consider pointers to different address space equal. */
2418 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2419 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2420 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2423 /* If both types don't have the same precision, then it is not safe
2425 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2431 /* In case both args are comparisons but with different comparison
2432 code, try to swap the comparison operands of one arg to produce
2433 a match and compare that variant. */
2434 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2435 && COMPARISON_CLASS_P (arg0)
2436 && COMPARISON_CLASS_P (arg1))
2438 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2440 if (TREE_CODE (arg0) == swap_code)
2441 return operand_equal_p (TREE_OPERAND (arg0, 0),
2442 TREE_OPERAND (arg1, 1), flags)
2443 && operand_equal_p (TREE_OPERAND (arg0, 1),
2444 TREE_OPERAND (arg1, 0), flags);
2447 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2448 /* This is needed for conversions and for COMPONENT_REF.
2449 Might as well play it safe and always test this. */
2450 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2451 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2452 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2455 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2456 We don't care about side effects in that case because the SAVE_EXPR
2457 takes care of that for us. In all other cases, two expressions are
2458 equal if they have no side effects. If we have two identical
2459 expressions with side effects that should be treated the same due
2460 to the only side effects being identical SAVE_EXPR's, that will
2461 be detected in the recursive calls below.
2462 If we are taking an invariant address of two identical objects
2463 they are necessarily equal as well. */
2464 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2465 && (TREE_CODE (arg0) == SAVE_EXPR
2466 || (flags & OEP_CONSTANT_ADDRESS_OF)
2467 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2470 /* Next handle constant cases, those for which we can return 1 even
2471 if ONLY_CONST is set. */
2472 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2473 switch (TREE_CODE (arg0))
2476 return tree_int_cst_equal (arg0, arg1);
2479 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2480 TREE_FIXED_CST (arg1));
2483 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2484 TREE_REAL_CST (arg1)))
2488 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2490 /* If we do not distinguish between signed and unsigned zero,
2491 consider them equal. */
2492 if (real_zerop (arg0) && real_zerop (arg1))
2501 v1 = TREE_VECTOR_CST_ELTS (arg0);
2502 v2 = TREE_VECTOR_CST_ELTS (arg1);
2505 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2508 v1 = TREE_CHAIN (v1);
2509 v2 = TREE_CHAIN (v2);
2516 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2518 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2522 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2523 && ! memcmp (TREE_STRING_POINTER (arg0),
2524 TREE_STRING_POINTER (arg1),
2525 TREE_STRING_LENGTH (arg0)));
2528 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2529 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2530 ? OEP_CONSTANT_ADDRESS_OF : 0);
2535 if (flags & OEP_ONLY_CONST)
2538 /* Define macros to test an operand from arg0 and arg1 for equality and a
2539 variant that allows null and views null as being different from any
2540 non-null value. In the latter case, if either is null, then both
2541 must be; otherwise, do the normal comparison. */
2542 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2543 TREE_OPERAND (arg1, N), flags)
2545 #define OP_SAME_WITH_NULL(N) \
2546 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2547 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2549 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2552 /* Two conversions are equal only if signedness and modes match. */
2553 switch (TREE_CODE (arg0))
2556 case FIX_TRUNC_EXPR:
2557 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2558 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2568 case tcc_comparison:
2570 if (OP_SAME (0) && OP_SAME (1))
2573 /* For commutative ops, allow the other order. */
2574 return (commutative_tree_code (TREE_CODE (arg0))
2575 && operand_equal_p (TREE_OPERAND (arg0, 0),
2576 TREE_OPERAND (arg1, 1), flags)
2577 && operand_equal_p (TREE_OPERAND (arg0, 1),
2578 TREE_OPERAND (arg1, 0), flags));
2581 /* If either of the pointer (or reference) expressions we are
2582 dereferencing contain a side effect, these cannot be equal. */
2583 if (TREE_SIDE_EFFECTS (arg0)
2584 || TREE_SIDE_EFFECTS (arg1))
2587 switch (TREE_CODE (arg0))
2595 /* Require equal access sizes, and similar pointer types.
2596 We can have incomplete types for array references of
2597 variable-sized arrays from the Fortran frontend
2599 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2600 || (TYPE_SIZE (TREE_TYPE (arg0))
2601 && TYPE_SIZE (TREE_TYPE (arg1))
2602 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2603 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2604 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2605 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2606 && OP_SAME (0) && OP_SAME (1));
2609 case ARRAY_RANGE_REF:
2610 /* Operands 2 and 3 may be null.
2611 Compare the array index by value if it is constant first as we
2612 may have different types but same value here. */
2614 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2615 TREE_OPERAND (arg1, 1))
2617 && OP_SAME_WITH_NULL (2)
2618 && OP_SAME_WITH_NULL (3));
2621 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2622 may be NULL when we're called to compare MEM_EXPRs. */
2623 return OP_SAME_WITH_NULL (0)
2625 && OP_SAME_WITH_NULL (2);
2628 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2634 case tcc_expression:
2635 switch (TREE_CODE (arg0))
2638 case TRUTH_NOT_EXPR:
2641 case TRUTH_ANDIF_EXPR:
2642 case TRUTH_ORIF_EXPR:
2643 return OP_SAME (0) && OP_SAME (1);
2646 case WIDEN_MULT_PLUS_EXPR:
2647 case WIDEN_MULT_MINUS_EXPR:
2650 /* The multiplication operands are commutative. */
2653 case TRUTH_AND_EXPR:
2655 case TRUTH_XOR_EXPR:
2656 if (OP_SAME (0) && OP_SAME (1))
2659 /* Otherwise take into account this is a commutative operation. */
2660 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2661 TREE_OPERAND (arg1, 1), flags)
2662 && operand_equal_p (TREE_OPERAND (arg0, 1),
2663 TREE_OPERAND (arg1, 0), flags));
2668 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2675 switch (TREE_CODE (arg0))
2678 /* If the CALL_EXPRs call different functions, then they
2679 clearly can not be equal. */
2680 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2685 unsigned int cef = call_expr_flags (arg0);
2686 if (flags & OEP_PURE_SAME)
2687 cef &= ECF_CONST | ECF_PURE;
2694 /* Now see if all the arguments are the same. */
2696 const_call_expr_arg_iterator iter0, iter1;
2698 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2699 a1 = first_const_call_expr_arg (arg1, &iter1);
2701 a0 = next_const_call_expr_arg (&iter0),
2702 a1 = next_const_call_expr_arg (&iter1))
2703 if (! operand_equal_p (a0, a1, flags))
2706 /* If we get here and both argument lists are exhausted
2707 then the CALL_EXPRs are equal. */
2708 return ! (a0 || a1);
2714 case tcc_declaration:
2715 /* Consider __builtin_sqrt equal to sqrt. */
2716 return (TREE_CODE (arg0) == FUNCTION_DECL
2717 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2718 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2719 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2726 #undef OP_SAME_WITH_NULL
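/* [Illustrative sketch, not part of fold-const.c.]  The comment before
   operand_equal_p distinguishes "indistinguishable" from "compares equal
   with ==".  On an IEEE-754 target the same distinction exists at run time
   for doubles: -0.0 == 0.0 is true although the two values have different
   bit patterns, while a NaN never compares equal to itself even though two
   copies of it are bit-identical.  demo_identical_bits is a hypothetical
   helper.  */
#include <string.h>

static int
demo_identical_bits (double x, double y)
{
  return memcmp (&x, &y, sizeof (double)) == 0;
}
/* demo_identical_bits (-0.0, 0.0) is 0 even though -0.0 == 0.0 holds,
   and for double n = 0.0 / 0.0 (a NaN), n != n holds even though
   demo_identical_bits (n, n) is 1.  */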
2729 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2730 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2732 When in doubt, return 0. */
2735 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2737 int unsignedp1, unsignedpo;
2738 tree primarg0, primarg1, primother;
2739 unsigned int correct_width;
2741 if (operand_equal_p (arg0, arg1, 0))
2744 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2745 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2748 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2749 and see if the inner values are the same. This removes any
2750 signedness comparison, which doesn't matter here. */
2751 primarg0 = arg0, primarg1 = arg1;
2752 STRIP_NOPS (primarg0);
2753 STRIP_NOPS (primarg1);
2754 if (operand_equal_p (primarg0, primarg1, 0))
2757 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2758 actual comparison operand, ARG0.
2760 First throw away any conversions to wider types
2761 already present in the operands. */
2763 primarg1 = get_narrower (arg1, &unsignedp1);
2764 primother = get_narrower (other, &unsignedpo);
2766 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2767 if (unsignedp1 == unsignedpo
2768 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2769 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2771 tree type = TREE_TYPE (arg0);
2773 /* Make sure shorter operand is extended the right way
2774 to match the longer operand. */
2775 primarg1 = fold_convert (signed_or_unsigned_type_for
2776 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2778 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2785 /* See if ARG is an expression that is either a comparison or is performing
2786 arithmetic on comparisons. The comparisons must only be comparing
2787 two different values, which will be stored in *CVAL1 and *CVAL2; if
2788 they are nonzero it means that some operands have already been found.
2789 No variables may be used anywhere else in the expression except in the
2790 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2791 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2793 If this is true, return 1. Otherwise, return zero. */
2796 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2798 enum tree_code code = TREE_CODE (arg);
2799 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2801 /* We can handle some of the tcc_expression cases here. */
2802 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2804 else if (tclass == tcc_expression
2805 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2806 || code == COMPOUND_EXPR))
2807 tclass = tcc_binary;
2809 else if (tclass == tcc_expression && code == SAVE_EXPR
2810 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2812 /* If we've already found a CVAL1 or CVAL2, this expression is
2813 too complex to handle. */
2814 if (*cval1 || *cval2)
2824 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2827 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2828 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2829 cval1, cval2, save_p));
2834 case tcc_expression:
2835 if (code == COND_EXPR)
2836 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2837 cval1, cval2, save_p)
2838 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2839 cval1, cval2, save_p)
2840 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2841 cval1, cval2, save_p));
2844 case tcc_comparison:
2845 /* First see if we can handle the first operand, then the second. For
2846 the second operand, we know *CVAL1 can't be zero. It must be that
2847 one side of the comparison is each of the values; test for the
2848 case where this isn't true by failing if the two operands
2851 if (operand_equal_p (TREE_OPERAND (arg, 0),
2852 TREE_OPERAND (arg, 1), 0))
2856 *cval1 = TREE_OPERAND (arg, 0);
2857 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2859 else if (*cval2 == 0)
2860 *cval2 = TREE_OPERAND (arg, 0);
2861 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2866 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2868 else if (*cval2 == 0)
2869 *cval2 = TREE_OPERAND (arg, 1);
2870 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2882 /* ARG is a tree that is known to contain just arithmetic operations and
2883 comparisons. Evaluate the operations in the tree substituting NEW0 for
2884 any occurrence of OLD0 as an operand of a comparison and likewise for
2888 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2889 tree old1, tree new1)
2891 tree type = TREE_TYPE (arg);
2892 enum tree_code code = TREE_CODE (arg);
2893 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2895 /* We can handle some of the tcc_expression cases here. */
2896 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2898 else if (tclass == tcc_expression
2899 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2900 tclass = tcc_binary;
2905 return fold_build1_loc (loc, code, type,
2906 eval_subst (loc, TREE_OPERAND (arg, 0),
2907 old0, new0, old1, new1));
2910 return fold_build2_loc (loc, code, type,
2911 eval_subst (loc, TREE_OPERAND (arg, 0),
2912 old0, new0, old1, new1),
2913 eval_subst (loc, TREE_OPERAND (arg, 1),
2914 old0, new0, old1, new1));
2916 case tcc_expression:
2920 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2924 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2928 return fold_build3_loc (loc, code, type,
2929 eval_subst (loc, TREE_OPERAND (arg, 0),
2930 old0, new0, old1, new1),
2931 eval_subst (loc, TREE_OPERAND (arg, 1),
2932 old0, new0, old1, new1),
2933 eval_subst (loc, TREE_OPERAND (arg, 2),
2934 old0, new0, old1, new1));
2938 /* Fall through - ??? */
2940 case tcc_comparison:
2942 tree arg0 = TREE_OPERAND (arg, 0);
2943 tree arg1 = TREE_OPERAND (arg, 1);
2945 /* We need to check both for exact equality and tree equality. The
2946 former will be true if the operand has a side-effect. In that
2947 case, we know the operand occurred exactly once. */
2949 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2951 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2954 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2956 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2959 return fold_build2_loc (loc, code, type, arg0, arg1);
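/* [Illustrative sketch, not part of fold-const.c.]  twoval_comparison_p and
   eval_subst cooperate: once an expression is known to be built purely from
   comparisons of two values A and B, substituting representative constants
   for (A, B) evaluates it once for each possible ordering.  A plain-C model
   of that idea, with hypothetical demo_* names:  */
static int
demo_twoval_expr (int a, int b)
{
  return (a < b) || (a == b);		/* expression under analysis */
}

static void
demo_eval_each_ordering (int *lt, int *eq, int *gt)
{
  *lt = demo_twoval_expr (0, 1);	/* value when A <  B */
  *eq = demo_twoval_expr (0, 0);	/* value when A == B */
  *gt = demo_twoval_expr (1, 0);	/* value when A >  B */
  /* Here *lt == 1, *eq == 1 and *gt == 0, so the expression is
     equivalent to A <= B.  */
}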
2967 /* Return a tree for the case when the result of an expression is RESULT
2968 converted to TYPE and OMITTED was previously an operand of the expression
2969 but is now not needed (e.g., we folded OMITTED * 0).
2971 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2972 the conversion of RESULT to TYPE. */
2975 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2977 tree t = fold_convert_loc (loc, type, result);
2979 /* If the resulting operand is an empty statement, just return the omitted
2980 statement cast to void. */
2981 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2982 return build1_loc (loc, NOP_EXPR, void_type_node,
2983 fold_ignored_result (omitted));
2985 if (TREE_SIDE_EFFECTS (omitted))
2986 return build2_loc (loc, COMPOUND_EXPR, type,
2987 fold_ignored_result (omitted), t);
2989 return non_lvalue_loc (loc, t);
2992 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2995 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2998 tree t = fold_convert_loc (loc, type, result);
3000 /* If the resulting operand is an empty statement, just return the omitted
3001 statement cast to void. */
3002 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3003 return build1_loc (loc, NOP_EXPR, void_type_node,
3004 fold_ignored_result (omitted));
3006 if (TREE_SIDE_EFFECTS (omitted))
3007 return build2_loc (loc, COMPOUND_EXPR, type,
3008 fold_ignored_result (omitted), t);
3010 return pedantic_non_lvalue_loc (loc, t);
3013 /* Return a tree for the case when the result of an expression is RESULT
3014 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3015 of the expression but are now not needed.
3017 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3018 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3019 evaluated before OMITTED2. Otherwise, if neither has side effects,
3020 just do the conversion of RESULT to TYPE. */
3023 omit_two_operands_loc (location_t loc, tree type, tree result,
3024 tree omitted1, tree omitted2)
3026 tree t = fold_convert_loc (loc, type, result);
3028 if (TREE_SIDE_EFFECTS (omitted2))
3029 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3030 if (TREE_SIDE_EFFECTS (omitted1))
3031 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3033 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
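/* [Illustrative sketch, not part of fold-const.c.]  The omit_*_operand*
   helpers keep the side effects of operands the fold itself no longer
   needs.  At the source level the COMPOUND_EXPR they build corresponds to
   the comma operator: folding demo_f (x) * 0 cannot simply become 0 when
   demo_f has side effects, but it can become (demo_f (x), 0).  The demo_*
   names are hypothetical.  */
static int demo_calls;

static int
demo_f (int x)
{
  demo_calls++;				/* side effect that must survive */
  return x + 1;
}

static int
demo_omit_one_operand (int x)
{
  return (demo_f (x), 0);		/* folded form of demo_f (x) * 0 */
}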
3037 /* Return a simplified tree node for the truth-negation of ARG. This
3038 never alters ARG itself. We assume that ARG is an operation that
3039 returns a truth value (0 or 1).
3041 FIXME: one would think we would fold the result, but it causes
3042 problems with the dominator optimizer. */
3045 fold_truth_not_expr (location_t loc, tree arg)
3047 tree type = TREE_TYPE (arg);
3048 enum tree_code code = TREE_CODE (arg);
3049 location_t loc1, loc2;
3051 /* If this is a comparison, we can simply invert it, except for
3052 floating-point non-equality comparisons, in which case we just
3053 enclose a TRUTH_NOT_EXPR around what we have. */
3055 if (TREE_CODE_CLASS (code) == tcc_comparison)
3057 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3058 if (FLOAT_TYPE_P (op_type)
3059 && flag_trapping_math
3060 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3061 && code != NE_EXPR && code != EQ_EXPR)
3064 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3065 if (code == ERROR_MARK)
3068 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3069 TREE_OPERAND (arg, 1));
3075 return constant_boolean_node (integer_zerop (arg), type);
3077 case TRUTH_AND_EXPR:
3078 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3079 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3080 return build2_loc (loc, TRUTH_OR_EXPR, type,
3081 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3082 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3085 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3086 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3087 return build2_loc (loc, TRUTH_AND_EXPR, type,
3088 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3089 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3091 case TRUTH_XOR_EXPR:
3092 /* Here we can invert either operand. We invert the first operand
3093 unless the second operand is a TRUTH_NOT_EXPR in which case our
3094 result is the XOR of the first operand with the inside of the
3095 negation of the second operand. */
3097 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3098 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3099 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3101 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3102 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3103 TREE_OPERAND (arg, 1));
3105 case TRUTH_ANDIF_EXPR:
3106 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3107 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3108 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3109 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3110 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3112 case TRUTH_ORIF_EXPR:
3113 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3114 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3115 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3116 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3117 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3119 case TRUTH_NOT_EXPR:
3120 return TREE_OPERAND (arg, 0);
3124 tree arg1 = TREE_OPERAND (arg, 1);
3125 tree arg2 = TREE_OPERAND (arg, 2);
3127 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3128 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3130 /* A COND_EXPR may have a throw as one operand, which
3131 then has void type. Just leave void operands
3133 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3134 VOID_TYPE_P (TREE_TYPE (arg1))
3135 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3136 VOID_TYPE_P (TREE_TYPE (arg2))
3137 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3141 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3142 return build2_loc (loc, COMPOUND_EXPR, type,
3143 TREE_OPERAND (arg, 0),
3144 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3146 case NON_LVALUE_EXPR:
3147 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3148 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3151 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3152 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3154 /* ... fall through ... */
3157 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3158 return build1_loc (loc, TREE_CODE (arg), type,
3159 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3162 if (!integer_onep (TREE_OPERAND (arg, 1)))
3164 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3167 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3169 case CLEANUP_POINT_EXPR:
3170 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3171 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3172 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3179 /* Return a simplified tree node for the truth-negation of ARG. This
3180 never alters ARG itself. We assume that ARG is an operation that
3181 returns a truth value (0 or 1).
3183 FIXME: one would think we would fold the result, but it causes
3184 problems with the dominator optimizer. */
3187 invert_truthvalue_loc (location_t loc, tree arg)
3191 if (TREE_CODE (arg) == ERROR_MARK)
3194 tem = fold_truth_not_expr (loc, arg);
3196 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
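/* [Illustrative check, not part of fold-const.c.]  The TRUTH_AND/TRUTH_OR
   cases in fold_truth_not_expr are De Morgan's laws, and the
   TRUTH_ANDIF/TRUTH_ORIF cases are the same laws for the short-circuit
   operators.  A hypothetical plain-C spot check:  */
static int
demo_demorgan_check (int a, int b)
{
  return (!(a && b) == (!a || !b))
	 && (!(a || b) == (!a && !b));
}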
3201 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3202 operands are another bit-wise operation with a common input. If so,
3203 distribute the bit operations to save an operation and possibly two if
3204 constants are involved. For example, convert
3205 (A | B) & (A | C) into A | (B & C)
3206 Further simplification will occur if B and C are constants.
3208 If this optimization cannot be done, 0 will be returned. */
3211 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3212 tree arg0, tree arg1)
3217 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3218 || TREE_CODE (arg0) == code
3219 || (TREE_CODE (arg0) != BIT_AND_EXPR
3220 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3223 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3225 common = TREE_OPERAND (arg0, 0);
3226 left = TREE_OPERAND (arg0, 1);
3227 right = TREE_OPERAND (arg1, 1);
3229 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3231 common = TREE_OPERAND (arg0, 0);
3232 left = TREE_OPERAND (arg0, 1);
3233 right = TREE_OPERAND (arg1, 0);
3235 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3237 common = TREE_OPERAND (arg0, 1);
3238 left = TREE_OPERAND (arg0, 0);
3239 right = TREE_OPERAND (arg1, 1);
3241 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3243 common = TREE_OPERAND (arg0, 1);
3244 left = TREE_OPERAND (arg0, 0);
3245 right = TREE_OPERAND (arg1, 0);
3250 common = fold_convert_loc (loc, type, common);
3251 left = fold_convert_loc (loc, type, left);
3252 right = fold_convert_loc (loc, type, right);
3253 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3254 fold_build2_loc (loc, code, type, left, right));
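/* [Illustrative check, not part of fold-const.c.]  The distribution above
   is the boolean-algebra identity (A | B) & (A | C) == A | (B & C), and its
   dual with & and | exchanged; both hold bitwise for arbitrary integers:  */
static int
demo_distribute_check (unsigned a, unsigned b, unsigned c)
{
  return ((a | b) & (a | c)) == (a | (b & c))
	 && ((a & b) | (a & c)) == (a & (b | c));
}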
3257 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3258 with code CODE. This optimization is unsafe. */
3260 distribute_real_division (location_t loc, enum tree_code code, tree type,
3261 tree arg0, tree arg1)
3263 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3264 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3266 /* (A / C) +- (B / C) -> (A +- B) / C. */
3268 && operand_equal_p (TREE_OPERAND (arg0, 1),
3269 TREE_OPERAND (arg1, 1), 0))
3270 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3271 fold_build2_loc (loc, code, type,
3272 TREE_OPERAND (arg0, 0),
3273 TREE_OPERAND (arg1, 0)),
3274 TREE_OPERAND (arg0, 1));
3276 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3277 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3278 TREE_OPERAND (arg1, 0), 0)
3279 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3280 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3282 REAL_VALUE_TYPE r0, r1;
3283 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3284 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3286 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3288 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3289 real_arithmetic (&r0, code, &r0, &r1);
3290 return fold_build2_loc (loc, MULT_EXPR, type,
3291 TREE_OPERAND (arg0, 0),
3292 build_real (type, r0));
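/* [Illustrative sketch, not part of fold-const.c.]  The two rewrites above,
   (A / C) +- (B / C) -> (A +- B) / C and
   (A / C1) +- (A / C2) -> A * (1/C1 +- 1/C2),
   are exact in real arithmetic but only approximate in floating point,
   which is why the function comment calls the optimization unsafe;
   presumably its callers only use it under unsafe-math options (the guard
   is outside this excerpt).  The second form in plain C:  */
static double
demo_distributed_division (double a, double c1, double c2)
{
  return a * (1.0 / c1 + 1.0 / c2);	/* rewrite of a / c1 + a / c2 */
}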
3298 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3299 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3302 make_bit_field_ref (location_t loc, tree inner, tree type,
3303 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3305 tree result, bftype;
3309 tree size = TYPE_SIZE (TREE_TYPE (inner));
3310 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3311 || POINTER_TYPE_P (TREE_TYPE (inner)))
3312 && host_integerp (size, 0)
3313 && tree_low_cst (size, 0) == bitsize)
3314 return fold_convert_loc (loc, type, inner);
3318 if (TYPE_PRECISION (bftype) != bitsize
3319 || TYPE_UNSIGNED (bftype) == !unsignedp)
3320 bftype = build_nonstandard_integer_type (bitsize, 0);
3322 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3323 size_int (bitsize), bitsize_int (bitpos));
3326 result = fold_convert_loc (loc, type, result);
3331 /* Optimize a bit-field compare.
3333 There are two cases: First is a compare against a constant and the
3334 second is a comparison of two items where the fields are at the same
3335 bit position relative to the start of a chunk (byte, halfword, word)
3336 large enough to contain it. In these cases we can avoid the shift
3337 implicit in bitfield extractions.
3339 For constants, we emit a compare of the shifted constant with the
3340 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3341 compared. For two fields at the same position, we do the ANDs with the
3342 similar mask and compare the result of the ANDs.
3344 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3345 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3346 are the left and right operands of the comparison, respectively.
3348 If the optimization described above can be done, we return the resulting
3349 tree. Otherwise we return zero. */
3352 optimize_bit_field_compare (location_t loc, enum tree_code code,
3353 tree compare_type, tree lhs, tree rhs)
3355 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3356 tree type = TREE_TYPE (lhs);
3357 tree signed_type, unsigned_type;
3358 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3359 enum machine_mode lmode, rmode, nmode;
3360 int lunsignedp, runsignedp;
3361 int lvolatilep = 0, rvolatilep = 0;
3362 tree linner, rinner = NULL_TREE;
3366 /* Get all the information about the extractions being done. If the bit size
3367 is the same as the size of the underlying object, we aren't doing an
3368 extraction at all and so can do nothing. We also don't want to
3369 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3370 then will no longer be able to replace it. */
3371 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3372 &lunsignedp, &lvolatilep, false);
3373 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3374 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3379 /* If this is not a constant, we can only do something if bit positions,
3380 sizes, and signedness are the same. */
3381 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3382 &runsignedp, &rvolatilep, false);
3384 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3385 || lunsignedp != runsignedp || offset != 0
3386 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3390 /* See if we can find a mode to refer to this field. We should be able to,
3391 but fail if we can't. */
3393 && GET_MODE_BITSIZE (lmode) > 0
3394 && flag_strict_volatile_bitfields > 0)
3397 nmode = get_best_mode (lbitsize, lbitpos,
3398 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3399 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3400 TYPE_ALIGN (TREE_TYPE (rinner))),
3401 word_mode, lvolatilep || rvolatilep);
3402 if (nmode == VOIDmode)
3405 /* Set signed and unsigned types of the precision of this mode for the
3407 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3408 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3410 /* Compute the bit position and size for the new reference and our offset
3411 within it. If the new reference is the same size as the original, we
3412 won't optimize anything, so return zero. */
3413 nbitsize = GET_MODE_BITSIZE (nmode);
3414 nbitpos = lbitpos & ~ (nbitsize - 1);
3416 if (nbitsize == lbitsize)
3419 if (BYTES_BIG_ENDIAN)
3420 lbitpos = nbitsize - lbitsize - lbitpos;
3422 /* Make the mask to be used against the extracted field. */
3423 mask = build_int_cst_type (unsigned_type, -1);
3424 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3425 mask = const_binop (RSHIFT_EXPR, mask,
3426 size_int (nbitsize - lbitsize - lbitpos));
3429 /* If not comparing with constant, just rework the comparison
3431 return fold_build2_loc (loc, code, compare_type,
3432 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3433 make_bit_field_ref (loc, linner,
3438 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3439 make_bit_field_ref (loc, rinner,
3445 /* Otherwise, we are handling the constant case. See if the constant is too
3446 big for the field. Warn and return a tree for 0 (false) if so. We do
3447 this not only for its own sake, but to avoid having to test for this
3448 error case below. If we didn't, we might generate wrong code.
3450 For unsigned fields, the constant shifted right by the field length should
3451 be all zero. For signed fields, the high-order bits should agree with
3456 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3457 fold_convert_loc (loc,
3458 unsigned_type, rhs),
3459 size_int (lbitsize))))
3461 warning (0, "comparison is always %d due to width of bit-field",
3463 return constant_boolean_node (code == NE_EXPR, compare_type);
3468 tree tem = const_binop (RSHIFT_EXPR,
3469 fold_convert_loc (loc, signed_type, rhs),
3470 size_int (lbitsize - 1));
3471 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3473 warning (0, "comparison is always %d due to width of bit-field",
3475 return constant_boolean_node (code == NE_EXPR, compare_type);
3479 /* Single-bit compares should always be against zero. */
3480 if (lbitsize == 1 && ! integer_zerop (rhs))
3482 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3483 rhs = build_int_cst (type, 0);
3486 /* Make a new bitfield reference, shift the constant over the
3487 appropriate number of bits and mask it with the computed mask
3488 (in case this was a signed field). If we changed it, make a new one. */
3489 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3492 TREE_SIDE_EFFECTS (lhs) = 1;
3493 TREE_THIS_VOLATILE (lhs) = 1;
3496 rhs = const_binop (BIT_AND_EXPR,
3497 const_binop (LSHIFT_EXPR,
3498 fold_convert_loc (loc, unsigned_type, rhs),
3499 size_int (lbitpos)),
3502 lhs = build2_loc (loc, code, compare_type,
3503 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
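/* [Illustrative sketch, not part of fold-const.c.]  For the constant case
   the transformation above turns "extract the field, then compare" into
   "mask the containing word, then compare against the shifted constant",
   avoiding the shift implied by the extraction.  A plain-C model for a
   hypothetical 3-bit field at bit position 4 of a 32-bit word:  */
static int
demo_bitfield_compare (unsigned word, unsigned cst)
{
  const unsigned mask = 0x7u << 4;	/* 3 bits starting at bit 4 */
  int extract_then_compare = ((word >> 4) & 0x7u) == cst;
  int mask_then_compare = (word & mask) == (cst << 4);
  return extract_then_compare == mask_then_compare;  /* 1 whenever cst < 8 */
}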
3507 /* Subroutine for fold_truthop: decode a field reference.
3509 If EXP is a comparison reference, we return the innermost reference.
3511 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3512 set to the starting bit number.
3514 If the innermost field can be completely contained in a mode-sized
3515 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3517 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3518 otherwise it is not changed.
3520 *PUNSIGNEDP is set to the signedness of the field.
3522 *PMASK is set to the mask used. This is either contained in a
3523 BIT_AND_EXPR or derived from the width of the field.
3525 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3527 Return 0 if this is not a component reference or is one that we can't
3528 do anything with. */
3531 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3532 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3533 int *punsignedp, int *pvolatilep,
3534 tree *pmask, tree *pand_mask)
3536 tree outer_type = 0;
3538 tree mask, inner, offset;
3540 unsigned int precision;
3542 /* All the optimizations using this function assume integer fields.
3543 There are problems with FP fields since the type_for_size call
3544 below can fail for, e.g., XFmode. */
3545 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3548 /* We are interested in the bare arrangement of bits, so strip everything
3549 that doesn't affect the machine mode. However, record the type of the
3550 outermost expression if it may matter below. */
3551 if (CONVERT_EXPR_P (exp)
3552 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3553 outer_type = TREE_TYPE (exp);
3556 if (TREE_CODE (exp) == BIT_AND_EXPR)
3558 and_mask = TREE_OPERAND (exp, 1);
3559 exp = TREE_OPERAND (exp, 0);
3560 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3561 if (TREE_CODE (and_mask) != INTEGER_CST)
3565 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3566 punsignedp, pvolatilep, false);
3567 if ((inner == exp && and_mask == 0)
3568 || *pbitsize < 0 || offset != 0
3569 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3572 /* If the number of bits in the reference is the same as the bitsize of
3573 the outer type, then the outer type gives the signedness. Otherwise
3574 (in case of a small bitfield) the signedness is unchanged. */
3575 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3576 *punsignedp = TYPE_UNSIGNED (outer_type);
3578 /* Compute the mask to access the bitfield. */
3579 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3580 precision = TYPE_PRECISION (unsigned_type);
3582 mask = build_int_cst_type (unsigned_type, -1);
3584 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3585 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3587 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3589 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3590 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3593 *pand_mask = and_mask;
3597 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3601 all_ones_mask_p (const_tree mask, int size)
3603 tree type = TREE_TYPE (mask);
3604 unsigned int precision = TYPE_PRECISION (type);
3607 tmask = build_int_cst_type (signed_type_for (type), -1);
3610 tree_int_cst_equal (mask,
3611 const_binop (RSHIFT_EXPR,
3612 const_binop (LSHIFT_EXPR, tmask,
3613 size_int (precision - size)),
3614 size_int (precision - size)));
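/* [Illustrative sketch, not part of fold-const.c.]  The shift pair above
   builds the canonical "SIZE low-order ones" mask: start from all ones,
   then shift left and back right by precision - size so that only the low
   SIZE bits survive.  The same computation for a hypothetical 32-bit
   precision (valid for 1 <= size <= 32; size == 0 would shift by the full
   width, which C leaves undefined):  */
static unsigned
demo_low_ones_mask (int size)
{
  unsigned tmask = ~0u;					/* all ones */
  return (tmask << (32 - size)) >> (32 - size);		/* e.g. size 5 -> 0x1f */
}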
3617 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3618 represents the sign bit of EXP's type. If EXP represents a sign
3619 or zero extension, also test VAL against the unextended type.
3620 The return value is the (sub)expression whose sign bit is VAL,
3621 or NULL_TREE otherwise. */
3624 sign_bit_p (tree exp, const_tree val)
3626 unsigned HOST_WIDE_INT mask_lo, lo;
3627 HOST_WIDE_INT mask_hi, hi;
3631 /* Tree EXP must have an integral type. */
3632 t = TREE_TYPE (exp);
3633 if (! INTEGRAL_TYPE_P (t))
3636 /* Tree VAL must be an integer constant. */
3637 if (TREE_CODE (val) != INTEGER_CST
3638 || TREE_OVERFLOW (val))
3641 width = TYPE_PRECISION (t);
3642 if (width > HOST_BITS_PER_WIDE_INT)
3644 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3647 mask_hi = ((unsigned HOST_WIDE_INT) -1
3648 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3654 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3657 mask_lo = ((unsigned HOST_WIDE_INT) -1
3658 >> (HOST_BITS_PER_WIDE_INT - width));
3661 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3662 treat VAL as if it were unsigned. */
3663 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3664 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3667 /* Handle extension from a narrower type. */
3668 if (TREE_CODE (exp) == NOP_EXPR
3669 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3670 return sign_bit_p (TREE_OPERAND (exp, 0), val);
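/* [Illustrative sketch, not part of fold-const.c.]  For a WIDTH-bit type the
   sign-bit constant tested above is simply 1 << (WIDTH - 1); the split into
   hi/lo words with their masks exists only because the constant may be
   wider than one HOST_WIDE_INT.  A hypothetical single-word version for
   widths up to 64:  */
#include <stdint.h>

static uint64_t
demo_sign_bit (int width)
{
  return (uint64_t) 1 << (width - 1);	/* e.g. width 32 -> 0x80000000 */
}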
3675 /* Subroutine for fold_truthop: determine if an operand is simple enough
3676 to be evaluated unconditionally. */
3679 simple_operand_p (const_tree exp)
3681 /* Strip any conversions that don't change the machine mode. */
3684 return (CONSTANT_CLASS_P (exp)
3685 || TREE_CODE (exp) == SSA_NAME
3687 && ! TREE_ADDRESSABLE (exp)
3688 && ! TREE_THIS_VOLATILE (exp)
3689 && ! DECL_NONLOCAL (exp)
3690 /* Don't regard global variables as simple. They may be
3691 allocated in ways unknown to the compiler (shared memory,
3692 #pragma weak, etc). */
3693 && ! TREE_PUBLIC (exp)
3694 && ! DECL_EXTERNAL (exp)
3695 /* Loading a static variable is unduly expensive, but global
3696 registers aren't expensive. */
3697 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3700 /* The following functions are subroutines to fold_range_test and allow it to
3701 try to change a logical combination of comparisons into a range test.
3704 X == 2 || X == 3 || X == 4 || X == 5
3708 (unsigned) (X - 2) <= 3
3710 We describe each set of comparisons as being either inside or outside
3711 a range, using a variable named like IN_P, and then describe the
3712 range with a lower and upper bound. If one of the bounds is omitted,
3713 it represents either the highest or lowest value of the type.
3715 In the comments below, we represent a range by two numbers in brackets
3716 preceded by a "+" to designate being inside that range, or a "-" to
3717 designate being outside that range, so the condition can be inverted by
3718 flipping the prefix. An omitted bound is represented by a "-". For
3719 example, "- [-, 10]" means being outside the range starting at the lowest
3720 possible value and ending at 10, in other words, being greater than 10.
3721 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3724 We set up things so that the missing bounds are handled in a consistent
3725 manner so neither a missing bound nor "true" and "false" need to be
3726 handled using a special case. */
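/* [Illustrative check, not part of fold-const.c.]  The rewrite described
   above shifts the range down to start at zero and then does one unsigned
   comparison.  The example from the comment, X == 2 || X == 3 || X == 4
   || X == 5 becoming (unsigned) (X - 2) <= 3, can be spot-checked in plain
   C (the subtraction is done in unsigned arithmetic so it cannot
   overflow):  */
static int
demo_range_test_check (int x)
{
  int chain = x == 2 || x == 3 || x == 4 || x == 5;
  int range = (unsigned) x - 2u <= 3u;
  return chain == range;		/* 1 for every int x */
}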
3728 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3729 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3730 and UPPER1_P are nonzero if the respective argument is an upper bound
3731 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3732 must be specified for a comparison. ARG1 will be converted to ARG0's
3733 type if both are specified. */
3736 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3737 tree arg1, int upper1_p)
3743 /* If neither arg represents infinity, do the normal operation.
3744 Else, if not a comparison, return infinity. Else handle the special
3745 comparison rules. Note that most of the cases below won't occur, but
3746 are handled for consistency. */
3748 if (arg0 != 0 && arg1 != 0)
3750 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3751 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3753 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3756 if (TREE_CODE_CLASS (code) != tcc_comparison)
3759 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3760 for neither. In real maths, we cannot assume open ended ranges are
3761 the same. But, this is computer arithmetic, where numbers are finite.
3762 We can therefore make the transformation of any unbounded range with
3763 the value Z, Z being greater than any representable number. This permits
3764 us to treat unbounded ranges as equal. */
3765 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3766 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3770 result = sgn0 == sgn1;
3773 result = sgn0 != sgn1;
3776 result = sgn0 < sgn1;
3779 result = sgn0 <= sgn1;
3782 result = sgn0 > sgn1;
3785 result = sgn0 >= sgn1;
3791 return constant_boolean_node (result, type);
3794 /* Given EXP, a logical expression, set the range it is testing into
3795 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3796 actually being tested. *PLOW and *PHIGH will be made of the same
3797 type as the returned expression. If EXP is not a comparison, we
3798 will most likely not be returning a useful value and range. Set
3799 *STRICT_OVERFLOW_P to true if the return value is only valid
3800 because signed overflow is undefined; otherwise, do not change
3801 *STRICT_OVERFLOW_P. */
3804 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3805 bool *strict_overflow_p)
3807 enum tree_code code;
3808 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3809 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3811 tree low, high, n_low, n_high;
3812 location_t loc = EXPR_LOCATION (exp);
3814 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3815 and see if we can refine the range. Some of the cases below may not
3816 happen, but it doesn't seem worth worrying about this. We "continue"
3817 the outer loop when we've changed something; otherwise we "break"
3818 the switch, which will "break" the while. */
3821 low = high = build_int_cst (TREE_TYPE (exp), 0);
3825 code = TREE_CODE (exp);
3826 exp_type = TREE_TYPE (exp);
3828 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3830 if (TREE_OPERAND_LENGTH (exp) > 0)
3831 arg0 = TREE_OPERAND (exp, 0);
3832 if (TREE_CODE_CLASS (code) == tcc_comparison
3833 || TREE_CODE_CLASS (code) == tcc_unary
3834 || TREE_CODE_CLASS (code) == tcc_binary)
3835 arg0_type = TREE_TYPE (arg0);
3836 if (TREE_CODE_CLASS (code) == tcc_binary
3837 || TREE_CODE_CLASS (code) == tcc_comparison
3838 || (TREE_CODE_CLASS (code) == tcc_expression
3839 && TREE_OPERAND_LENGTH (exp) > 1))
3840 arg1 = TREE_OPERAND (exp, 1);
3845 case TRUTH_NOT_EXPR:
3846 in_p = ! in_p, exp = arg0;
3849 case EQ_EXPR: case NE_EXPR:
3850 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3851 /* We can only do something if the range is testing for zero
3852 and if the second operand is an integer constant. Note that
3853 saying something is "in" the range we make is done by
3854 complementing IN_P since it will set in the initial case of
3855 being not equal to zero; "out" is leaving it alone. */
3856 if (low == 0 || high == 0
3857 || ! integer_zerop (low) || ! integer_zerop (high)
3858 || TREE_CODE (arg1) != INTEGER_CST)
3863 case NE_EXPR: /* - [c, c] */
3866 case EQ_EXPR: /* + [c, c] */
3867 in_p = ! in_p, low = high = arg1;
3869 case GT_EXPR: /* - [-, c] */
3870 low = 0, high = arg1;
3872 case GE_EXPR: /* + [c, -] */
3873 in_p = ! in_p, low = arg1, high = 0;
3875 case LT_EXPR: /* - [c, -] */
3876 low = arg1, high = 0;
3878 case LE_EXPR: /* + [-, c] */
3879 in_p = ! in_p, low = 0, high = arg1;
3885 /* If this is an unsigned comparison, we also know that EXP is
3886 greater than or equal to zero. We base the range tests we make
3887 on that fact, so we record it here so we can parse existing
3888 range tests. We test arg0_type since often the return type
3889 of, e.g. EQ_EXPR, is boolean. */
3890 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3892 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3894 build_int_cst (arg0_type, 0),
3898 in_p = n_in_p, low = n_low, high = n_high;
3900 /* If the high bound is missing, but we have a nonzero low
3901 bound, reverse the range so it goes from zero to the low bound
3903 if (high == 0 && low && ! integer_zerop (low))
3906 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3907 integer_one_node, 0);
3908 low = build_int_cst (arg0_type, 0);
3916 /* (-x) IN [a,b] -> x in [-b, -a] */
3917 n_low = range_binop (MINUS_EXPR, exp_type,
3918 build_int_cst (exp_type, 0),
3920 n_high = range_binop (MINUS_EXPR, exp_type,
3921 build_int_cst (exp_type, 0),
3923 if (n_high != 0 && TREE_OVERFLOW (n_high))
3929 exp = build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3930 build_int_cst (exp_type, 1));
3933 case PLUS_EXPR: case MINUS_EXPR:
3934 if (TREE_CODE (arg1) != INTEGER_CST)
3937 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3938 move a constant to the other side. */
3939 if (!TYPE_UNSIGNED (arg0_type)
3940 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3943 /* If EXP is signed, any overflow in the computation is undefined,
3944 so we don't worry about it so long as our computations on
3945 the bounds don't overflow. For unsigned, overflow is defined
3946 and this is exactly the right thing. */
3947 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3948 arg0_type, low, 0, arg1, 0);
3949 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3950 arg0_type, high, 1, arg1, 0);
3951 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3952 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3955 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3956 *strict_overflow_p = true;
3959 /* Check for an unsigned range which has wrapped around the maximum
3960 value thus making n_high < n_low, and normalize it. */
3961 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3963 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3964 integer_one_node, 0);
3965 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3966 integer_one_node, 0);
3968 /* If the range is of the form +/- [ x+1, x ], we won't
3969 be able to normalize it. But then, it represents the
3970 whole range or the empty set, so make it
3972 if (tree_int_cst_equal (n_low, low)
3973 && tree_int_cst_equal (n_high, high))
3979 low = n_low, high = n_high;
3984 CASE_CONVERT: case NON_LVALUE_EXPR:
3985 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3988 if (! INTEGRAL_TYPE_P (arg0_type)
3989 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3990 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3993 n_low = low, n_high = high;
3996 n_low = fold_convert_loc (loc, arg0_type, n_low);
3999 n_high = fold_convert_loc (loc, arg0_type, n_high);
4002 /* If we're converting arg0 from an unsigned type, to exp,
4003 a signed type, we will be doing the comparison as unsigned.
4004 The tests above have already verified that LOW and HIGH
4007 So we have to ensure that we will handle large unsigned
4008 values the same way that the current signed bounds treat
4011 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4015 /* For fixed-point modes, we need to pass the saturating flag
4016 as the 2nd parameter. */
4017 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4018 equiv_type = lang_hooks.types.type_for_mode
4019 (TYPE_MODE (arg0_type),
4020 TYPE_SATURATING (arg0_type));
4022 equiv_type = lang_hooks.types.type_for_mode
4023 (TYPE_MODE (arg0_type), 1);
4025 /* A range without an upper bound is, naturally, unbounded.
4026 Since convert would have cropped a very large value, use
4027 the max value for the destination type. */
4029 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4030 : TYPE_MAX_VALUE (arg0_type);
4032 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4033 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4034 fold_convert_loc (loc, arg0_type,
4036 build_int_cst (arg0_type, 1));
4038 /* If the low bound is specified, "and" the range with the
4039 range for which the original unsigned value will be
4043 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4044 1, n_low, n_high, 1,
4045 fold_convert_loc (loc, arg0_type,
4050 in_p = (n_in_p == in_p);
4054 /* Otherwise, "or" the range with the range of the input
4055 that will be interpreted as negative. */
4056 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4057 0, n_low, n_high, 1,
4058 fold_convert_loc (loc, arg0_type,
4063 in_p = (in_p != n_in_p);
4068 low = n_low, high = n_high;
4078 /* If EXP is a constant, we can evaluate whether this is true or false. */
4079 if (TREE_CODE (exp) == INTEGER_CST)
4081 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4083 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4089 *pin_p = in_p, *plow = low, *phigh = high;
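/* [Illustrative check, not part of fold-const.c.]  The PLUS_EXPR/MINUS_EXPR
   case of make_range moves a constant across the range bounds: a test such
   as x + 10 >= 15 && x + 10 <= 25 describes the range [5, 15] for x itself.
   A hypothetical spot check in unsigned arithmetic, whose wrap-around the
   code above explicitly normalizes:  */
static int
demo_make_range_check (unsigned x)
{
  int original = x + 10 >= 15 && x + 10 <= 25;
  int shifted  = x >= 5 && x <= 15;
  return original == shifted;		/* 1 for every unsigned x */
}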
4093 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4094 type, TYPE, return an expression to test if EXP is in (or out of, depending
4095 on IN_P) the range. Return 0 if the test couldn't be created. */
4098 build_range_check (location_t loc, tree type, tree exp, int in_p,
4099 tree low, tree high)
4101 tree etype = TREE_TYPE (exp), value;
4103 #ifdef HAVE_canonicalize_funcptr_for_compare
4104 /* Disable this optimization for function pointer expressions
4105 on targets that require function pointer canonicalization. */
4106 if (HAVE_canonicalize_funcptr_for_compare
4107 && TREE_CODE (etype) == POINTER_TYPE
4108 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4114 value = build_range_check (loc, type, exp, 1, low, high);
4116 return invert_truthvalue_loc (loc, value);
4121 if (low == 0 && high == 0)
4122 return build_int_cst (type, 1);
4125 return fold_build2_loc (loc, LE_EXPR, type, exp,
4126 fold_convert_loc (loc, etype, high));
4129 return fold_build2_loc (loc, GE_EXPR, type, exp,
4130 fold_convert_loc (loc, etype, low));
4132 if (operand_equal_p (low, high, 0))
4133 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4134 fold_convert_loc (loc, etype, low));
4136 if (integer_zerop (low))
4138 if (! TYPE_UNSIGNED (etype))
4140 etype = unsigned_type_for (etype);
4141 high = fold_convert_loc (loc, etype, high);
4142 exp = fold_convert_loc (loc, etype, exp);
4144 return build_range_check (loc, type, exp, 1, 0, high);
4147 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4148 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4150 unsigned HOST_WIDE_INT lo;
4154 prec = TYPE_PRECISION (etype);
4155 if (prec <= HOST_BITS_PER_WIDE_INT)
4158 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4162 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4163 lo = (unsigned HOST_WIDE_INT) -1;
4166 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4168 if (TYPE_UNSIGNED (etype))
4170 tree signed_etype = signed_type_for (etype);
4171 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4173 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4175 etype = signed_etype;
4176 exp = fold_convert_loc (loc, etype, exp);
4178 return fold_build2_loc (loc, GT_EXPR, type, exp,
4179 build_int_cst (etype, 0));
4183 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4184 This requires wrap-around arithmetic for the type of the expression.
4185 First make sure that arithmetic in this type is valid, then make sure
4186 that it wraps around. */
4187 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4188 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4189 TYPE_UNSIGNED (etype));
4191 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4193 tree utype, minv, maxv;
4195 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4196 for the type in question, as we rely on this here. */
4197 utype = unsigned_type_for (etype);
4198 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4199 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4200 integer_one_node, 1);
4201 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4203 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4210 high = fold_convert_loc (loc, etype, high);
4211 low = fold_convert_loc (loc, etype, low);
4212 exp = fold_convert_loc (loc, etype, exp);
4214 value = const_binop (MINUS_EXPR, high, low);
4217 if (POINTER_TYPE_P (etype))
4219 if (value != 0 && !TREE_OVERFLOW (value))
4221 low = fold_convert_loc (loc, sizetype, low);
4222 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4223 return build_range_check (loc, type,
4224 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4226 1, build_int_cst (etype, 0), value);
4231 if (value != 0 && !TREE_OVERFLOW (value))
4232 return build_range_check (loc, type,
4233 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4234 1, build_int_cst (etype, 0), value);
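/* [Illustrative check, not part of fold-const.c.]  One of the special cases
   above: a range of the form [1, signed-max] reduces to a signed "greater
   than zero" test.  For an 8-bit value, c >= 1 && c <= 127 is the same as
   (signed char) c > 0, assuming the usual two's-complement conversion of
   out-of-range values (which GCC defines):  */
static int
demo_signed_range_check (unsigned char c)
{
  int pair   = c >= 1 && c <= 127;
  int folded = (signed char) c > 0;
  return pair == folded;		/* 1 for every c in 0..255 */
}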
4239 /* Return the predecessor of VAL in its type, handling the infinite case. */
4242 range_predecessor (tree val)
4244 tree type = TREE_TYPE (val);
4246 if (INTEGRAL_TYPE_P (type)
4247 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4250 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4253 /* Return the successor of VAL in its type, handling the infinite case. */
4256 range_successor (tree val)
4258 tree type = TREE_TYPE (val);
4260 if (INTEGRAL_TYPE_P (type)
4261 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4264 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4267 /* Given two ranges, see if we can merge them into one. Return 1 if we
4268 can, 0 if we can't. Set the output range into the specified parameters. */
4271 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4272 tree high0, int in1_p, tree low1, tree high1)
4280 int lowequal = ((low0 == 0 && low1 == 0)
4281 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4282 low0, 0, low1, 0)));
4283 int highequal = ((high0 == 0 && high1 == 0)
4284 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4285 high0, 1, high1, 1)));
4287 /* Make range 0 be the range that starts first, or ends last if they
4288 start at the same value. Swap them if it isn't. */
4289 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4292 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4293 high1, 1, high0, 1))))
4295 temp = in0_p, in0_p = in1_p, in1_p = temp;
4296 tem = low0, low0 = low1, low1 = tem;
4297 tem = high0, high0 = high1, high1 = tem;
4300 /* Now flag two cases, whether the ranges are disjoint or whether the
4301 second range is totally subsumed in the first. Note that the tests
4302 below are simplified by the ones above. */
4303 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4304 high0, 1, low1, 0));
4305 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4306 high1, 1, high0, 1));
4308 /* We now have four cases, depending on whether we are including or
4309 excluding the two ranges. */
4312 /* If they don't overlap, the result is false. If the second range
4313 is a subset it is the result. Otherwise, the range is from the start
4314 of the second to the end of the first. */
4316 in_p = 0, low = high = 0;
4318 in_p = 1, low = low1, high = high1;
4320 in_p = 1, low = low1, high = high0;
4323 else if (in0_p && ! in1_p)
4325 /* If they don't overlap, the result is the first range. If they are
4326 equal, the result is false. If the second range is a subset of the
4327 first, and the ranges begin at the same place, we go from just after
4328 the end of the second range to the end of the first. If the second
4329 range is not a subset of the first, or if it is a subset and both
4330 ranges end at the same place, the range starts at the start of the
4331 first range and ends just before the second range.
4332 Otherwise, we can't describe this as a single range. */
4334 in_p = 1, low = low0, high = high0;
4335 else if (lowequal && highequal)
4336 in_p = 0, low = high = 0;
4337 else if (subset && lowequal)
4339 low = range_successor (high1);
4344 /* We are in the weird situation where high0 > high1 but
4345 high1 has no successor. Punt. */
4349 else if (! subset || highequal)
4352 high = range_predecessor (low1);
4356 /* low0 < low1 but low1 has no predecessor. Punt. */
4364 else if (! in0_p && in1_p)
4366 /* If they don't overlap, the result is the second range. If the second
4367 is a subset of the first, the result is false. Otherwise,
4368 the range starts just after the first range and ends at the
4369 end of the second. */
4371 in_p = 1, low = low1, high = high1;
4372 else if (subset || highequal)
4373 in_p = 0, low = high = 0;
4376 low = range_successor (high0);
4381 /* high1 > high0 but high0 has no successor. Punt. */
4389 /* The case where we are excluding both ranges. Here the complex case
4390 is if they don't overlap. In that case, the only time we have a
4391 range is if they are adjacent. If the second is a subset of the
4392 first, the result is the first. Otherwise, the range to exclude
4393 starts at the beginning of the first range and ends at the end of the
4397 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4398 range_successor (high0),
4400 in_p = 0, low = low0, high = high1;
4403 /* Canonicalize - [min, x] into - [-, x]. */
4404 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4405 switch (TREE_CODE (TREE_TYPE (low0)))
4408 if (TYPE_PRECISION (TREE_TYPE (low0))
4409 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4413 if (tree_int_cst_equal (low0,
4414 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4418 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4419 && integer_zerop (low0))
4426 /* Canonicalize - [x, max] into - [x, -]. */
4427 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4428 switch (TREE_CODE (TREE_TYPE (high1)))
4431 if (TYPE_PRECISION (TREE_TYPE (high1))
4432 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4436 if (tree_int_cst_equal (high1,
4437 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4441 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4442 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4444 integer_one_node, 1)))
4451	  /* The ranges might also be adjacent between the maximum and
4452 minimum values of the given type. For
4453 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4454 return + [x + 1, y - 1]. */
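	  /* For example (illustrative only): for unsigned char, excluding
	     [-, 9] together with excluding [20, -] leaves exactly the
	     values 10 .. 19, so the merge yields the "in" range [10, 19].  */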
4455 if (low0 == 0 && high1 == 0)
4457 low = range_successor (high0);
4458 high = range_predecessor (low1);
4459 if (low == 0 || high == 0)
4469 in_p = 0, low = low0, high = high0;
4471 in_p = 0, low = low0, high = high1;
4474 *pin_p = in_p, *plow = low, *phigh = high;
4479 /* Subroutine of fold, looking inside expressions of the form
4480 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4481    of the COND_EXPR.  This function is also used to optimize
4482 A op B ? C : A, by reversing the comparison first.
4484 Return a folded expression whose code is not a COND_EXPR
4485 anymore, or NULL_TREE if no folding opportunity is found. */
4488 fold_cond_expr_with_comparison (location_t loc, tree type,
4489 tree arg0, tree arg1, tree arg2)
4491 enum tree_code comp_code = TREE_CODE (arg0);
4492 tree arg00 = TREE_OPERAND (arg0, 0);
4493 tree arg01 = TREE_OPERAND (arg0, 1);
4494 tree arg1_type = TREE_TYPE (arg1);
4500 /* If we have A op 0 ? A : -A, consider applying the following
4503 A == 0? A : -A same as -A
4504 A != 0? A : -A same as A
4505 A >= 0? A : -A same as abs (A)
4506 A > 0? A : -A same as abs (A)
4507 A <= 0? A : -A same as -abs (A)
4508 A < 0? A : -A same as -abs (A)
4510 None of these transformations work for modes with signed
4511 zeros. If A is +/-0, the first two transformations will
4512 change the sign of the result (from +0 to -0, or vice
4513 versa). The last four will fix the sign of the result,
4514 even though the original expressions could be positive or
4515 negative, depending on the sign of A.
4517 Note that all these transformations are correct if A is
4518 NaN, since the two alternatives (A and -A) are also NaNs. */
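      /* Concretely (an illustrative sketch, not from the original sources):
	 for a signed integer X,

	     x >= 0 ? x : -x	folds to	ABS_EXPR <x>
	     x <= 0 ? x : -x	folds to	-ABS_EXPR <x>

	 and the same folds apply to floating-point operands only when
	 signed zeros need not be honored.  */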
4519 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4520 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4521 ? real_zerop (arg01)
4522 : integer_zerop (arg01))
4523 && ((TREE_CODE (arg2) == NEGATE_EXPR
4524 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4525 /* In the case that A is of the form X-Y, '-A' (arg2) may
4526 have already been folded to Y-X, check for that. */
4527 || (TREE_CODE (arg1) == MINUS_EXPR
4528 && TREE_CODE (arg2) == MINUS_EXPR
4529 && operand_equal_p (TREE_OPERAND (arg1, 0),
4530 TREE_OPERAND (arg2, 1), 0)
4531 && operand_equal_p (TREE_OPERAND (arg1, 1),
4532 TREE_OPERAND (arg2, 0), 0))))
4537 tem = fold_convert_loc (loc, arg1_type, arg1);
4538 return pedantic_non_lvalue_loc (loc,
4539 fold_convert_loc (loc, type,
4540 negate_expr (tem)));
4543 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4546 if (flag_trapping_math)
4551 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4552 arg1 = fold_convert_loc (loc, signed_type_for
4553 (TREE_TYPE (arg1)), arg1);
4554 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4555 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4558 if (flag_trapping_math)
4562 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4563 arg1 = fold_convert_loc (loc, signed_type_for
4564 (TREE_TYPE (arg1)), arg1);
4565 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4566 return negate_expr (fold_convert_loc (loc, type, tem));
4568 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4572 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4573 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4574 both transformations are correct when A is NaN: A != 0
4575 is then true, and A == 0 is false. */
4577 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4578 && integer_zerop (arg01) && integer_zerop (arg2))
4580 if (comp_code == NE_EXPR)
4581 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4582 else if (comp_code == EQ_EXPR)
4583 return build_int_cst (type, 0);
4586 /* Try some transformations of A op B ? A : B.
4588 A == B? A : B same as B
4589 A != B? A : B same as A
4590 A >= B? A : B same as max (A, B)
4591 A > B? A : B same as max (B, A)
4592 A <= B? A : B same as min (A, B)
4593 A < B? A : B same as min (B, A)
4595 As above, these transformations don't work in the presence
4596 of signed zeros. For example, if A and B are zeros of
4597 opposite sign, the first two transformations will change
4598 the sign of the result. In the last four, the original
4599 expressions give different results for (A=+0, B=-0) and
4600 (A=-0, B=+0), but the transformed expressions do not.
4602 The first two transformations are correct if either A or B
4603 is a NaN. In the first transformation, the condition will
4604 be false, and B will indeed be chosen. In the case of the
4605 second transformation, the condition A != B will be true,
4606 and A will be chosen.
4608 The conversions to max() and min() are not correct if B is
4609 a number and A is not. The conditions in the original
4610 expressions will be false, so all four give B. The min()
4611 and max() versions would give a NaN instead. */
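      /* For instance (illustrative only): with signed integer operands,

	     x <= y ? x : y	folds to	MIN_EXPR <x, y>
	     x >= y ? x : y	folds to	MAX_EXPR <x, y>

	 whereas for floating-point operands the MIN/MAX forms are used
	 only when neither NaNs nor signed zeros need to be honored.  */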
4612 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4613 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4614 /* Avoid these transformations if the COND_EXPR may be used
4615 as an lvalue in the C++ front-end. PR c++/19199. */
4617 || (strcmp (lang_hooks.name, "GNU C++") != 0
4618 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4619 || ! maybe_lvalue_p (arg1)
4620 || ! maybe_lvalue_p (arg2)))
4622 tree comp_op0 = arg00;
4623 tree comp_op1 = arg01;
4624 tree comp_type = TREE_TYPE (comp_op0);
4626 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4627 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4637 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4639 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4644 /* In C++ a ?: expression can be an lvalue, so put the
4645 operand which will be used if they are equal first
4646 so that we can convert this back to the
4647 corresponding COND_EXPR. */
4648 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4650 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4651 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4652 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4653 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4654 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4655 comp_op1, comp_op0);
4656 return pedantic_non_lvalue_loc (loc,
4657 fold_convert_loc (loc, type, tem));
4664 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4666 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4667 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4668 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4669 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4670 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4671 comp_op1, comp_op0);
4672 return pedantic_non_lvalue_loc (loc,
4673 fold_convert_loc (loc, type, tem));
4677 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4678 return pedantic_non_lvalue_loc (loc,
4679 fold_convert_loc (loc, type, arg2));
4682 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4683 return pedantic_non_lvalue_loc (loc,
4684 fold_convert_loc (loc, type, arg1));
4687 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4692 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4693 we might still be able to simplify this. For example,
4694 if C1 is one less or one more than C2, this might have started
4695 out as a MIN or MAX and been transformed by this function.
4696 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
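      /* A small illustration (not from the original sources): for int X,

	     x < 3 ? x : 2	is recognized as  MIN_EXPR <x, 2>

	 because C1 (3) is C2 (2) + 1, and

	     x > 5 ? x : 6	is recognized as  MAX_EXPR <x, 6>

	 because C1 (5) is C2 (6) - 1.  */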
4698 if (INTEGRAL_TYPE_P (type)
4699 && TREE_CODE (arg01) == INTEGER_CST
4700 && TREE_CODE (arg2) == INTEGER_CST)
4704 if (TREE_CODE (arg1) == INTEGER_CST)
4706 /* We can replace A with C1 in this case. */
4707 arg1 = fold_convert_loc (loc, type, arg01);
4708 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4711 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4712 MIN_EXPR, to preserve the signedness of the comparison. */
4713 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4715 && operand_equal_p (arg01,
4716 const_binop (PLUS_EXPR, arg2,
4717 build_int_cst (type, 1)),
4720 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4721 fold_convert_loc (loc, TREE_TYPE (arg00),
4723 return pedantic_non_lvalue_loc (loc,
4724 fold_convert_loc (loc, type, tem));
4729 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4731 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4733 && operand_equal_p (arg01,
4734 const_binop (MINUS_EXPR, arg2,
4735 build_int_cst (type, 1)),
4738 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4739 fold_convert_loc (loc, TREE_TYPE (arg00),
4741 return pedantic_non_lvalue_loc (loc,
4742 fold_convert_loc (loc, type, tem));
4747 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4748 MAX_EXPR, to preserve the signedness of the comparison. */
4749 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4751 && operand_equal_p (arg01,
4752 const_binop (MINUS_EXPR, arg2,
4753 build_int_cst (type, 1)),
4756 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4757 fold_convert_loc (loc, TREE_TYPE (arg00),
4759 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4764 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4765 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4767 && operand_equal_p (arg01,
4768 const_binop (PLUS_EXPR, arg2,
4769 build_int_cst (type, 1)),
4772 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4773 fold_convert_loc (loc, TREE_TYPE (arg00),
4775 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4789 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4790 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4791 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4795 /* EXP is some logical combination of boolean tests. See if we can
4796 merge it into some range test. Return the new tree if so. */
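/* An illustrative sketch (not part of the original sources): for

       x >= 10 && x <= 20

   make_range turns the two comparisons into the ranges [10, -] and [-, 20],
   merge_ranges combines them into the single range [10, 20], and
   build_range_check then emits roughly the single unsigned comparison

       (unsigned) x - 10U <= 10U.  */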
4799 fold_range_test (location_t loc, enum tree_code code, tree type,
4802 int or_op = (code == TRUTH_ORIF_EXPR
4803 || code == TRUTH_OR_EXPR);
4804 int in0_p, in1_p, in_p;
4805 tree low0, low1, low, high0, high1, high;
4806 bool strict_overflow_p = false;
4807 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4808 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4810 const char * const warnmsg = G_("assuming signed overflow does not occur "
4811 "when simplifying range test");
4813 /* If this is an OR operation, invert both sides; we will invert
4814 again at the end. */
4816 in0_p = ! in0_p, in1_p = ! in1_p;
4818 /* If both expressions are the same, if we can merge the ranges, and we
4819 can build the range test, return it or it inverted. If one of the
4820 ranges is always true or always false, consider it to be the same
4821 expression as the other. */
4822 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4823 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4825 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
4827 : rhs != 0 ? rhs : integer_zero_node,
4830 if (strict_overflow_p)
4831 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4832 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4835   /* On machines where branches are expensive, if this is a
4836 short-circuited branch and the underlying object on both sides
4837 is the same, make a non-short-circuit operation. */
4838 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4839 && lhs != 0 && rhs != 0
4840 && (code == TRUTH_ANDIF_EXPR
4841 || code == TRUTH_ORIF_EXPR)
4842 && operand_equal_p (lhs, rhs, 0))
4844 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4845 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4846 which cases we can't do this. */
4847 if (simple_operand_p (lhs))
4848 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4849 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4852 else if (lang_hooks.decls.global_bindings_p () == 0
4853 && ! CONTAINS_PLACEHOLDER_P (lhs))
4855 tree common = save_expr (lhs);
4857 if (0 != (lhs = build_range_check (loc, type, common,
4858 or_op ? ! in0_p : in0_p,
4860 && (0 != (rhs = build_range_check (loc, type, common,
4861 or_op ? ! in1_p : in1_p,
4864 if (strict_overflow_p)
4865 fold_overflow_warning (warnmsg,
4866 WARN_STRICT_OVERFLOW_COMPARISON);
4867 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4868 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4877 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4878 bit value. Arrange things so the extra bits will be set to zero if and
4879    only if C is sign-extended to its full width.  If MASK is nonzero,
4880 it is an INTEGER_CST that should be AND'ed with the extra bits. */
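/* For instance (illustrative only): with a 32-bit mode and P == 8 and no
   MASK, the sign-extended constant 0xffffff85 is mapped to 0x00000085,
   while the zero-extended constant 0x00000085 is mapped to 0xffffff85, so
   the extra bits end up zero exactly when C was the sign extension of its
   low 8 bits.  */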
4883 unextend (tree c, int p, int unsignedp, tree mask)
4885 tree type = TREE_TYPE (c);
4886 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4889 if (p == modesize || unsignedp)
4892 /* We work by getting just the sign bit into the low-order bit, then
4893 into the high-order bit, then sign-extend. We then XOR that value
4895 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4896 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4898 /* We must use a signed type in order to get an arithmetic right shift.
4899 However, we must also avoid introducing accidental overflows, so that
4900 a subsequent call to integer_zerop will work. Hence we must
4901 do the type conversion here. At this point, the constant is either
4902 zero or one, and the conversion to a signed type can never overflow.
4903 We could get an overflow if this conversion is done anywhere else. */
4904 if (TYPE_UNSIGNED (type))
4905 temp = fold_convert (signed_type_for (type), temp);
4907 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4908 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4910 temp = const_binop (BIT_AND_EXPR, temp,
4911 fold_convert (TREE_TYPE (c), mask));
4912 /* If necessary, convert the type back to match the type of C. */
4913 if (TYPE_UNSIGNED (type))
4914 temp = fold_convert (type, temp);
4916 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4919 /* For an expression that has the form
4923 we can drop one of the inner expressions and simplify to
4927 LOC is the location of the resulting expression. OP is the inner
4928 logical operation; the left-hand side in the examples above, while CMPOP
4929 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4930 removing a condition that guards another, as in
4931 (A != NULL && A->...) || A == NULL
4932 which we must not transform. If RHS_ONLY is true, only eliminate the
4933 right-most operand of the inner logical operation. */
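/* An illustrative example (not from the original sources): for a pointer P,

       p == 0 || (p != 0 && p->f != 0)

   simplifies to p == 0 || p->f != 0, because the inner p != 0 is the exact
   inversion of the comparison on the other arm and remains guarded by it.
   By contrast, (p != 0 && p->f != 0) || p == 0 is left alone: the candidate
   operand is the left-hand guard of the inner operation, and RHS_ONLY
   forbids removing it.  */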
4936 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4939 tree type = TREE_TYPE (cmpop);
4940 enum tree_code code = TREE_CODE (cmpop);
4941 enum tree_code truthop_code = TREE_CODE (op);
4942 tree lhs = TREE_OPERAND (op, 0);
4943 tree rhs = TREE_OPERAND (op, 1);
4944 tree orig_lhs = lhs, orig_rhs = rhs;
4945 enum tree_code rhs_code = TREE_CODE (rhs);
4946 enum tree_code lhs_code = TREE_CODE (lhs);
4947 enum tree_code inv_code;
4949 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4952 if (TREE_CODE_CLASS (code) != tcc_comparison)
4955 if (rhs_code == truthop_code)
4957 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4958 if (newrhs != NULL_TREE)
4961 rhs_code = TREE_CODE (rhs);
4964 if (lhs_code == truthop_code && !rhs_only)
4966 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4967 if (newlhs != NULL_TREE)
4970 lhs_code = TREE_CODE (lhs);
4974 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
4975 if (inv_code == rhs_code
4976 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
4977 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
4979 if (!rhs_only && inv_code == lhs_code
4980 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
4981 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
4983 if (rhs != orig_rhs || lhs != orig_lhs)
4984 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
4989 /* Find ways of folding logical expressions of LHS and RHS:
4990 Try to merge two comparisons to the same innermost item.
4991 Look for range tests like "ch >= '0' && ch <= '9'".
4992 Look for combinations of simple terms on machines with expensive branches
4993 and evaluate the RHS unconditionally.
4995 For example, if we have p->a == 2 && p->b == 4 and we can make an
4996 object large enough to span both A and B, we can do this with a comparison
4997 against the object ANDed with the a mask.
4999 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5000 operations to do this with one comparison.
5002    We check for both normal comparisons and the BIT_AND_EXPRs made by this
5003    function and the one above.
5005 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5006 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5008 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5011 We return the simplified tree or 0 if no optimization is possible. */
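/* Some illustrative results (not from the original sources), on targets
   where branches are expensive:

       a != 0 || b != 0		becomes		(a | b) != 0
       a == 0 && b == 0		becomes		(a | b) == 0

   for integral A and B of the same type; and for adjacent bit-fields,

       p->x == 1 && p->y == 2

   can be merged into a single wider masked load compared against the
   combined constant.  */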
5014 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5017 /* If this is the "or" of two comparisons, we can do something if
5018 the comparisons are NE_EXPR. If this is the "and", we can do something
5019 if the comparisons are EQ_EXPR. I.e.,
5020 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5022 WANTED_CODE is this operation code. For single bit fields, we can
5023 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5024 comparison for one-bit fields. */
5026 enum tree_code wanted_code;
5027 enum tree_code lcode, rcode;
5028 tree ll_arg, lr_arg, rl_arg, rr_arg;
5029 tree ll_inner, lr_inner, rl_inner, rr_inner;
5030 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5031 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5032 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5033 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5034 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5035 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5036 enum machine_mode lnmode, rnmode;
5037 tree ll_mask, lr_mask, rl_mask, rr_mask;
5038 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5039 tree l_const, r_const;
5040 tree lntype, rntype, result;
5041 HOST_WIDE_INT first_bit, end_bit;
5043 tree orig_lhs = lhs, orig_rhs = rhs;
5044 enum tree_code orig_code = code;
5046 /* Start by getting the comparison codes. Fail if anything is volatile.
5047 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5048 it were surrounded with a NE_EXPR. */
5050 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5053 lcode = TREE_CODE (lhs);
5054 rcode = TREE_CODE (rhs);
5056 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5058 lhs = build2 (NE_EXPR, truth_type, lhs,
5059 build_int_cst (TREE_TYPE (lhs), 0));
5063 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5065 rhs = build2 (NE_EXPR, truth_type, rhs,
5066 build_int_cst (TREE_TYPE (rhs), 0));
5070 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5071 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5074 ll_arg = TREE_OPERAND (lhs, 0);
5075 lr_arg = TREE_OPERAND (lhs, 1);
5076 rl_arg = TREE_OPERAND (rhs, 0);
5077 rr_arg = TREE_OPERAND (rhs, 1);
5079 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5080 if (simple_operand_p (ll_arg)
5081 && simple_operand_p (lr_arg))
5083 if (operand_equal_p (ll_arg, rl_arg, 0)
5084 && operand_equal_p (lr_arg, rr_arg, 0))
5086 result = combine_comparisons (loc, code, lcode, rcode,
5087 truth_type, ll_arg, lr_arg);
5091 else if (operand_equal_p (ll_arg, rr_arg, 0)
5092 && operand_equal_p (lr_arg, rl_arg, 0))
5094 result = combine_comparisons (loc, code, lcode,
5095 swap_tree_comparison (rcode),
5096 truth_type, ll_arg, lr_arg);
5102 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5103 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5105 /* If the RHS can be evaluated unconditionally and its operands are
5106 simple, it wins to evaluate the RHS unconditionally on machines
5107 with expensive branches. In this case, this isn't a comparison
5108 that can be merged. Avoid doing this if the RHS is a floating-point
5109 comparison since those can trap. */
5111 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5113 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5114 && simple_operand_p (rl_arg)
5115 && simple_operand_p (rr_arg))
5117 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5118 if (code == TRUTH_OR_EXPR
5119 && lcode == NE_EXPR && integer_zerop (lr_arg)
5120 && rcode == NE_EXPR && integer_zerop (rr_arg)
5121 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5122 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5123 return build2_loc (loc, NE_EXPR, truth_type,
5124 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5126 build_int_cst (TREE_TYPE (ll_arg), 0));
5128 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5129 if (code == TRUTH_AND_EXPR
5130 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5131 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5132 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5133 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5134 return build2_loc (loc, EQ_EXPR, truth_type,
5135 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5137 build_int_cst (TREE_TYPE (ll_arg), 0));
5139 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5141 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5142 return build2_loc (loc, code, truth_type, lhs, rhs);
5147 /* See if the comparisons can be merged. Then get all the parameters for
5150 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5151 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5155 ll_inner = decode_field_reference (loc, ll_arg,
5156 &ll_bitsize, &ll_bitpos, &ll_mode,
5157 &ll_unsignedp, &volatilep, &ll_mask,
5159 lr_inner = decode_field_reference (loc, lr_arg,
5160 &lr_bitsize, &lr_bitpos, &lr_mode,
5161 &lr_unsignedp, &volatilep, &lr_mask,
5163 rl_inner = decode_field_reference (loc, rl_arg,
5164 &rl_bitsize, &rl_bitpos, &rl_mode,
5165 &rl_unsignedp, &volatilep, &rl_mask,
5167 rr_inner = decode_field_reference (loc, rr_arg,
5168 &rr_bitsize, &rr_bitpos, &rr_mode,
5169 &rr_unsignedp, &volatilep, &rr_mask,
5172   /* The inner operation on the lhs of each comparison must be the same
5173      if we are to be able to do anything.
5174 Then see if we have constants. If not, the same must be true for
5176 if (volatilep || ll_inner == 0 || rl_inner == 0
5177 || ! operand_equal_p (ll_inner, rl_inner, 0))
5180 if (TREE_CODE (lr_arg) == INTEGER_CST
5181 && TREE_CODE (rr_arg) == INTEGER_CST)
5182 l_const = lr_arg, r_const = rr_arg;
5183 else if (lr_inner == 0 || rr_inner == 0
5184 || ! operand_equal_p (lr_inner, rr_inner, 0))
5187 l_const = r_const = 0;
5189 /* If either comparison code is not correct for our logical operation,
5190 fail. However, we can convert a one-bit comparison against zero into
5191 the opposite comparison against that bit being set in the field. */
5193 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5194 if (lcode != wanted_code)
5196 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5198 /* Make the left operand unsigned, since we are only interested
5199 in the value of one bit. Otherwise we are doing the wrong
5208 /* This is analogous to the code for l_const above. */
5209 if (rcode != wanted_code)
5211 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5220 /* See if we can find a mode that contains both fields being compared on
5221 the left. If we can't, fail. Otherwise, update all constants and masks
5222 to be relative to a field of that size. */
5223 first_bit = MIN (ll_bitpos, rl_bitpos);
5224 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5225 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5226 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5228 if (lnmode == VOIDmode)
5231 lnbitsize = GET_MODE_BITSIZE (lnmode);
5232 lnbitpos = first_bit & ~ (lnbitsize - 1);
5233 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5234 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5236 if (BYTES_BIG_ENDIAN)
5238 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5239 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5242 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5243 size_int (xll_bitpos));
5244 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5245 size_int (xrl_bitpos));
5249 l_const = fold_convert_loc (loc, lntype, l_const);
5250 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5251 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5252 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5253 fold_build1_loc (loc, BIT_NOT_EXPR,
5256 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5258 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5263 r_const = fold_convert_loc (loc, lntype, r_const);
5264 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5265 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5266 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5267 fold_build1_loc (loc, BIT_NOT_EXPR,
5270 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5272 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5276 /* If the right sides are not constant, do the same for it. Also,
5277 disallow this optimization if a size or signedness mismatch occurs
5278 between the left and right sides. */
5281 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5282 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5283 /* Make sure the two fields on the right
5284 correspond to the left without being swapped. */
5285 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5288 first_bit = MIN (lr_bitpos, rr_bitpos);
5289 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5290 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5291 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5293 if (rnmode == VOIDmode)
5296 rnbitsize = GET_MODE_BITSIZE (rnmode);
5297 rnbitpos = first_bit & ~ (rnbitsize - 1);
5298 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5299 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5301 if (BYTES_BIG_ENDIAN)
5303 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5304 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5307 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5309 size_int (xlr_bitpos));
5310 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5312 size_int (xrr_bitpos));
5314 /* Make a mask that corresponds to both fields being compared.
5315 Do this for both items being compared. If the operands are the
5316 same size and the bits being compared are in the same position
5317 then we can do this by masking both and comparing the masked
5319 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5320 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5321 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5323 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5324 ll_unsignedp || rl_unsignedp);
5325 if (! all_ones_mask_p (ll_mask, lnbitsize))
5326 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5328 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5329 lr_unsignedp || rr_unsignedp);
5330 if (! all_ones_mask_p (lr_mask, rnbitsize))
5331 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5333 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5336 /* There is still another way we can do something: If both pairs of
5337 fields being compared are adjacent, we may be able to make a wider
5338 field containing them both.
5340 Note that we still must mask the lhs/rhs expressions. Furthermore,
5341 the mask must be shifted to account for the shift done by
5342 make_bit_field_ref. */
5343 if ((ll_bitsize + ll_bitpos == rl_bitpos
5344 && lr_bitsize + lr_bitpos == rr_bitpos)
5345 || (ll_bitpos == rl_bitpos + rl_bitsize
5346 && lr_bitpos == rr_bitpos + rr_bitsize))
5350 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5351 ll_bitsize + rl_bitsize,
5352 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5353 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5354 lr_bitsize + rr_bitsize,
5355 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5357 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5358 size_int (MIN (xll_bitpos, xrl_bitpos)));
5359 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5360 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5362 /* Convert to the smaller type before masking out unwanted bits. */
5364 if (lntype != rntype)
5366 if (lnbitsize > rnbitsize)
5368 lhs = fold_convert_loc (loc, rntype, lhs);
5369 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5372 else if (lnbitsize < rnbitsize)
5374 rhs = fold_convert_loc (loc, lntype, rhs);
5375 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5380 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5381 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5383 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5384 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5386 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5392 /* Handle the case of comparisons with constants. If there is something in
5393 common between the masks, those bits of the constants must be the same.
5394 If not, the condition is always false. Test for this to avoid generating
5395 incorrect code below. */
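  /* For example (illustrative only): in

	 (x & 3) == 1 && (x & 3) == 2

     the two masks share bits on which the constants disagree, so the whole
     test is folded to constant false and a warning is issued.  */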
5396 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5397 if (! integer_zerop (result)
5398 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5399 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5401 if (wanted_code == NE_EXPR)
5403 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5404 return constant_boolean_node (true, truth_type);
5408 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5409 return constant_boolean_node (false, truth_type);
5413 /* Construct the expression we will return. First get the component
5414 reference we will make. Unless the mask is all ones the width of
5415 that field, perform the mask operation. Then compare with the
5417 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5418 ll_unsignedp || rl_unsignedp);
5420 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5421 if (! all_ones_mask_p (ll_mask, lnbitsize))
5422 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5424 return build2_loc (loc, wanted_code, truth_type, result,
5425 const_binop (BIT_IOR_EXPR, l_const, r_const));
5428 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5432 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5436 enum tree_code op_code;
5439 int consts_equal, consts_lt;
5442 STRIP_SIGN_NOPS (arg0);
5444 op_code = TREE_CODE (arg0);
5445 minmax_const = TREE_OPERAND (arg0, 1);
5446 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5447 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5448 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5449 inner = TREE_OPERAND (arg0, 0);
5451 /* If something does not permit us to optimize, return the original tree. */
5452 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5453 || TREE_CODE (comp_const) != INTEGER_CST
5454 || TREE_OVERFLOW (comp_const)
5455 || TREE_CODE (minmax_const) != INTEGER_CST
5456 || TREE_OVERFLOW (minmax_const))
5459 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5460 and GT_EXPR, doing the rest with recursive calls using logical
5464 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5467 = optimize_minmax_comparison (loc,
5468 invert_tree_comparison (code, false),
5471 return invert_truthvalue_loc (loc, tem);
5477 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5478 optimize_minmax_comparison
5479 (loc, EQ_EXPR, type, arg0, comp_const),
5480 optimize_minmax_comparison
5481 (loc, GT_EXPR, type, arg0, comp_const));
5484 if (op_code == MAX_EXPR && consts_equal)
5485 /* MAX (X, 0) == 0 -> X <= 0 */
5486 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5488 else if (op_code == MAX_EXPR && consts_lt)
5489 /* MAX (X, 0) == 5 -> X == 5 */
5490 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5492 else if (op_code == MAX_EXPR)
5493 /* MAX (X, 0) == -1 -> false */
5494 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5496 else if (consts_equal)
5497 /* MIN (X, 0) == 0 -> X >= 0 */
5498 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5501 /* MIN (X, 0) == 5 -> false */
5502 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5505 /* MIN (X, 0) == -1 -> X == -1 */
5506 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5509 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5510 /* MAX (X, 0) > 0 -> X > 0
5511 MAX (X, 0) > 5 -> X > 5 */
5512 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5514 else if (op_code == MAX_EXPR)
5515 /* MAX (X, 0) > -1 -> true */
5516 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5518 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5519 /* MIN (X, 0) > 0 -> false
5520 MIN (X, 0) > 5 -> false */
5521 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5524 /* MIN (X, 0) > -1 -> X > -1 */
5525 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5532 /* T is an integer expression that is being multiplied, divided, or taken a
5533 modulus (CODE says which and what kind of divide or modulus) by a
5534 constant C. See if we can eliminate that operation by folding it with
5535 other operations already in T. WIDE_TYPE, if non-null, is a type that
5536 should be used for the computation if wider than our type.
5538 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5539 (X * 2) + (Y * 4). We must, however, be assured that either the original
5540 expression would not overflow or that overflow is undefined for the type
5541 in the language in question.
5543 If we return a non-null expression, it is an equivalent form of the
5544 original computation, but need not be in the original type.
5546    We set *STRICT_OVERFLOW_P to true if the return value depends on
5547 signed overflow being undefined. Otherwise we do not change
5548 *STRICT_OVERFLOW_P. */
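/* Another worked instance (illustrative only): for a signed int I,

       (i * 4 + 8) / 4

   folds to i + 2, and *STRICT_OVERFLOW_P is set because the result is only
   valid if i * 4 does not overflow.  */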
5551 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5552 bool *strict_overflow_p)
5554 /* To avoid exponential search depth, refuse to allow recursion past
5555 three levels. Beyond that (1) it's highly unlikely that we'll find
5556 something interesting and (2) we've probably processed it before
5557 when we built the inner expression. */
5566 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5573 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5574 bool *strict_overflow_p)
5576 tree type = TREE_TYPE (t);
5577 enum tree_code tcode = TREE_CODE (t);
5578 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5579 > GET_MODE_SIZE (TYPE_MODE (type)))
5580 ? wide_type : type);
5582 int same_p = tcode == code;
5583 tree op0 = NULL_TREE, op1 = NULL_TREE;
5584 bool sub_strict_overflow_p;
5586 /* Don't deal with constants of zero here; they confuse the code below. */
5587 if (integer_zerop (c))
5590 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5591 op0 = TREE_OPERAND (t, 0);
5593 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5594 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5596 /* Note that we need not handle conditional operations here since fold
5597 already handles those cases. So just do arithmetic here. */
5601 /* For a constant, we can always simplify if we are a multiply
5602 or (for divide and modulus) if it is a multiple of our constant. */
5603 if (code == MULT_EXPR
5604 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5605 return const_binop (code, fold_convert (ctype, t),
5606 fold_convert (ctype, c));
5609 CASE_CONVERT: case NON_LVALUE_EXPR:
5610 /* If op0 is an expression ... */
5611 if ((COMPARISON_CLASS_P (op0)
5612 || UNARY_CLASS_P (op0)
5613 || BINARY_CLASS_P (op0)
5614 || VL_EXP_CLASS_P (op0)
5615 || EXPRESSION_CLASS_P (op0))
5616 /* ... and has wrapping overflow, and its type is smaller
5617 than ctype, then we cannot pass through as widening. */
5618 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5619 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5620 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5621 && (TYPE_PRECISION (ctype)
5622 > TYPE_PRECISION (TREE_TYPE (op0))))
5623 /* ... or this is a truncation (t is narrower than op0),
5624 then we cannot pass through this narrowing. */
5625 || (TYPE_PRECISION (type)
5626 < TYPE_PRECISION (TREE_TYPE (op0)))
5627 /* ... or signedness changes for division or modulus,
5628 then we cannot pass through this conversion. */
5629 || (code != MULT_EXPR
5630 && (TYPE_UNSIGNED (ctype)
5631 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5632	      /* ... or has undefined overflow while the type converted to
5633		 has not, we cannot do the operation in the inner type
5634 as that would introduce undefined overflow. */
5635 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5636 && !TYPE_OVERFLOW_UNDEFINED (type))))
5639 /* Pass the constant down and see if we can make a simplification. If
5640 we can, replace this expression with the inner simplification for
5641 possible later conversion to our or some other type. */
5642 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5643 && TREE_CODE (t2) == INTEGER_CST
5644 && !TREE_OVERFLOW (t2)
5645 && (0 != (t1 = extract_muldiv (op0, t2, code,
5647 ? ctype : NULL_TREE,
5648 strict_overflow_p))))
5653 /* If widening the type changes it from signed to unsigned, then we
5654 must avoid building ABS_EXPR itself as unsigned. */
5655 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5657 tree cstype = (*signed_type_for) (ctype);
5658 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5661 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5662 return fold_convert (ctype, t1);
5666 /* If the constant is negative, we cannot simplify this. */
5667 if (tree_int_cst_sgn (c) == -1)
5671 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5673 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5676 case MIN_EXPR: case MAX_EXPR:
5677 /* If widening the type changes the signedness, then we can't perform
5678 this optimization as that changes the result. */
5679 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5682 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5683 sub_strict_overflow_p = false;
5684 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5685 &sub_strict_overflow_p)) != 0
5686 && (t2 = extract_muldiv (op1, c, code, wide_type,
5687 &sub_strict_overflow_p)) != 0)
5689 if (tree_int_cst_sgn (c) < 0)
5690 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5691 if (sub_strict_overflow_p)
5692 *strict_overflow_p = true;
5693 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5694 fold_convert (ctype, t2));
5698 case LSHIFT_EXPR: case RSHIFT_EXPR:
5699 /* If the second operand is constant, this is a multiplication
5700	 or floor division by a power of two, so we can treat it that
5701 way unless the multiplier or divisor overflows. Signed
5702 left-shift overflow is implementation-defined rather than
5703 undefined in C90, so do not convert signed left shift into
5705 if (TREE_CODE (op1) == INTEGER_CST
5706 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5707 /* const_binop may not detect overflow correctly,
5708 so check for it explicitly here. */
5709 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5710 && TREE_INT_CST_HIGH (op1) == 0
5711 && 0 != (t1 = fold_convert (ctype,
5712 const_binop (LSHIFT_EXPR,
5715 && !TREE_OVERFLOW (t1))
5716 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5717 ? MULT_EXPR : FLOOR_DIV_EXPR,
5719 fold_convert (ctype, op0),
5721 c, code, wide_type, strict_overflow_p);
5724 case PLUS_EXPR: case MINUS_EXPR:
5725 /* See if we can eliminate the operation on both sides. If we can, we
5726 can return a new PLUS or MINUS. If we can't, the only remaining
5727 cases where we can do anything are if the second operand is a
5729 sub_strict_overflow_p = false;
5730 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5731 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5732 if (t1 != 0 && t2 != 0
5733 && (code == MULT_EXPR
5734 /* If not multiplication, we can only do this if both operands
5735 are divisible by c. */
5736 || (multiple_of_p (ctype, op0, c)
5737 && multiple_of_p (ctype, op1, c))))
5739 if (sub_strict_overflow_p)
5740 *strict_overflow_p = true;
5741 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5742 fold_convert (ctype, t2));
5745 /* If this was a subtraction, negate OP1 and set it to be an addition.
5746 This simplifies the logic below. */
5747 if (tcode == MINUS_EXPR)
5749 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5750 /* If OP1 was not easily negatable, the constant may be OP0. */
5751 if (TREE_CODE (op0) == INTEGER_CST)
5762 if (TREE_CODE (op1) != INTEGER_CST)
5765      /* If either OP1 or C is negative, this optimization is not safe for
5766 some of the division and remainder types while for others we need
5767 to change the code. */
5768 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5770 if (code == CEIL_DIV_EXPR)
5771 code = FLOOR_DIV_EXPR;
5772 else if (code == FLOOR_DIV_EXPR)
5773 code = CEIL_DIV_EXPR;
5774 else if (code != MULT_EXPR
5775 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5779 /* If it's a multiply or a division/modulus operation of a multiple
5780 of our constant, do the operation and verify it doesn't overflow. */
5781 if (code == MULT_EXPR
5782 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5784 op1 = const_binop (code, fold_convert (ctype, op1),
5785 fold_convert (ctype, c));
5786 /* We allow the constant to overflow with wrapping semantics. */
5788 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5794      /* If we have an unsigned type that is not a sizetype, we cannot widen
5795 the operation since it will change the result if the original
5796 computation overflowed. */
5797 if (TYPE_UNSIGNED (ctype)
5798 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5802 /* If we were able to eliminate our operation from the first side,
5803 apply our operation to the second side and reform the PLUS. */
5804 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5805 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5807 /* The last case is if we are a multiply. In that case, we can
5808 apply the distributive law to commute the multiply and addition
5809 if the multiplication of the constants doesn't overflow. */
5810 if (code == MULT_EXPR)
5811 return fold_build2 (tcode, ctype,
5812 fold_build2 (code, ctype,
5813 fold_convert (ctype, op0),
5814 fold_convert (ctype, c)),
5820 /* We have a special case here if we are doing something like
5821 (C * 8) % 4 since we know that's zero. */
5822 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5823 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5824 /* If the multiplication can overflow we cannot optimize this.
5825 ??? Until we can properly mark individual operations as
5826	     not overflowing we need to treat sizetype specially here as
5827	     stor-layout relies on this optimization to make
5828 DECL_FIELD_BIT_OFFSET always a constant. */
5829 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5830 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5831 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5832 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5833 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5835 *strict_overflow_p = true;
5836 return omit_one_operand (type, integer_zero_node, op0);
5839 /* ... fall through ... */
5841 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5842 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5843 /* If we can extract our operation from the LHS, do so and return a
5844 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5845 do something only if the second operand is a constant. */
5847 && (t1 = extract_muldiv (op0, c, code, wide_type,
5848 strict_overflow_p)) != 0)
5849 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5850 fold_convert (ctype, op1));
5851 else if (tcode == MULT_EXPR && code == MULT_EXPR
5852 && (t1 = extract_muldiv (op1, c, code, wide_type,
5853 strict_overflow_p)) != 0)
5854 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5855 fold_convert (ctype, t1));
5856 else if (TREE_CODE (op1) != INTEGER_CST)
5859 /* If these are the same operation types, we can associate them
5860 assuming no overflow. */
5865 mul = double_int_mul_with_sign
5867 (tree_to_double_int (op1),
5868 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5870 (tree_to_double_int (c),
5871 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5872 false, &overflow_p);
5873 overflow_p = (((!TYPE_UNSIGNED (ctype)
5874 || (TREE_CODE (ctype) == INTEGER_TYPE
5875 && TYPE_IS_SIZETYPE (ctype)))
5877 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5878 if (!double_int_fits_to_tree_p (ctype, mul)
5879 && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
5880 || !TYPE_UNSIGNED (ctype)
5881 || (TREE_CODE (ctype) == INTEGER_TYPE
5882 && TYPE_IS_SIZETYPE (ctype))))
5885 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5886 double_int_to_tree (ctype, mul));
5889 /* If these operations "cancel" each other, we have the main
5890 optimizations of this pass, which occur when either constant is a
5891	 multiple of the other, in which case we replace this with an
5892	 operation of either CODE or TCODE.
5894 If we have an unsigned type that is not a sizetype, we cannot do
5895 this since it will change the result if the original computation
5897 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5898 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5899 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5900 || (tcode == MULT_EXPR
5901 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5902 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5903 && code != MULT_EXPR)))
5905 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5907 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5908 *strict_overflow_p = true;
5909 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5910 fold_convert (ctype,
5911 const_binop (TRUNC_DIV_EXPR,
5914 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5916 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5917 *strict_overflow_p = true;
5918 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5919 fold_convert (ctype,
5920 const_binop (TRUNC_DIV_EXPR,
5933 /* Return a node which has the indicated constant VALUE (either 0 or
5934 1), and is of the indicated TYPE. */
5937 constant_boolean_node (int value, tree type)
5939 if (type == integer_type_node)
5940 return value ? integer_one_node : integer_zero_node;
5941 else if (type == boolean_type_node)
5942 return value ? boolean_true_node : boolean_false_node;
5944 return build_int_cst (type, value);
5948 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5949    Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
5950 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5951 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5952 COND is the first argument to CODE; otherwise (as in the example
5953 given here), it is the second argument. TYPE is the type of the
5954 original expression. Return NULL_TREE if no simplification is
5958 fold_binary_op_with_conditional_arg (location_t loc,
5959 enum tree_code code,
5960 tree type, tree op0, tree op1,
5961 tree cond, tree arg, int cond_first_p)
5963 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5964 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5965 tree test, true_value, false_value;
5966 tree lhs = NULL_TREE;
5967 tree rhs = NULL_TREE;
5969 if (TREE_CODE (cond) == COND_EXPR)
5971 test = TREE_OPERAND (cond, 0);
5972 true_value = TREE_OPERAND (cond, 1);
5973 false_value = TREE_OPERAND (cond, 2);
5974      /* If this operand throws an exception, then it does not make
5975 sense to try to perform a logical or arithmetic operation
5977 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5979 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5984 tree testtype = TREE_TYPE (cond);
5986 true_value = constant_boolean_node (true, testtype);
5987 false_value = constant_boolean_node (false, testtype);
5990 /* This transformation is only worthwhile if we don't have to wrap ARG
5991 in a SAVE_EXPR and the operation can be simplified on at least one
5992     of the branches once it is pushed inside the COND_EXPR.  */
5993 if (!TREE_CONSTANT (arg)
5994 && (TREE_SIDE_EFFECTS (arg)
5995 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
5998 arg = fold_convert_loc (loc, arg_type, arg);
6001 true_value = fold_convert_loc (loc, cond_type, true_value);
6003 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6005 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6009 false_value = fold_convert_loc (loc, cond_type, false_value);
6011 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6013 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6016 /* Check that we have simplified at least one of the branches. */
6017 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6020 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6024 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6026 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6027 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6028 ADDEND is the same as X.
6030 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6031 and finite. The problematic cases are when X is zero, and its mode
6032 has signed zeros. In the case of rounding towards -infinity,
6033 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6034 modes, X + 0 is not the same as X because -0 + 0 is 0. */
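/* Concretely (illustrative only, leaving signaling NaNs aside): x + 0.0 may
   be folded to x only when signed zeros need not be honored, because
   (-0.0) + 0.0 is +0.0; x - 0.0 may be folded to x even with signed zeros,
   as long as rounding towards -infinity is not in effect.  */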
6037 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6039 if (!real_zerop (addend))
6042 /* Don't allow the fold with -fsignaling-nans. */
6043 if (HONOR_SNANS (TYPE_MODE (type)))
6046 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6047 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6050 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6051 if (TREE_CODE (addend) == REAL_CST
6052 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6055 /* The mode has signed zeros, and we have to honor their sign.
6056 In this situation, there is only one case we can return true for.
6057 X - 0 is the same as X unless rounding towards -infinity is
6059 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6062 /* Subroutine of fold() that checks comparisons of built-in math
6063 functions against real constants.
6065 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6066 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6067 is the type of the result and ARG0 and ARG1 are the operands of the
6068 comparison. ARG1 must be a TREE_REAL_CST.
6070 The function returns the constant folded tree if a simplification
6071 can be made, and NULL_TREE otherwise. */
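/* A typical result (illustrative only): assuming NaNs can be ignored
   (e.g. under -ffinite-math-only),

       sqrt (x) < 2.0		becomes		x < 4.0

   and, when NaNs must be honored,

       sqrt (x) > -1.0		becomes		x >= 0.0.  */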
6074 fold_mathfn_compare (location_t loc,
6075 enum built_in_function fcode, enum tree_code code,
6076 tree type, tree arg0, tree arg1)
6080 if (BUILTIN_SQRT_P (fcode))
6082 tree arg = CALL_EXPR_ARG (arg0, 0);
6083 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6085 c = TREE_REAL_CST (arg1);
6086 if (REAL_VALUE_NEGATIVE (c))
6088 /* sqrt(x) < y is always false, if y is negative. */
6089 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6090 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6092 /* sqrt(x) > y is always true, if y is negative and we
6093 don't care about NaNs, i.e. negative values of x. */
6094 if (code == NE_EXPR || !HONOR_NANS (mode))
6095 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6097 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6098 return fold_build2_loc (loc, GE_EXPR, type, arg,
6099 build_real (TREE_TYPE (arg), dconst0));
6101 else if (code == GT_EXPR || code == GE_EXPR)
6105 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6106 real_convert (&c2, mode, &c2);
6108 if (REAL_VALUE_ISINF (c2))
6110 /* sqrt(x) > y is x == +Inf, when y is very large. */
6111 if (HONOR_INFINITIES (mode))
6112 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6113 build_real (TREE_TYPE (arg), c2));
6115 /* sqrt(x) > y is always false, when y is very large
6116 and we don't care about infinities. */
6117 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6120 /* sqrt(x) > c is the same as x > c*c. */
6121 return fold_build2_loc (loc, code, type, arg,
6122 build_real (TREE_TYPE (arg), c2));
6124 else if (code == LT_EXPR || code == LE_EXPR)
6128 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6129 real_convert (&c2, mode, &c2);
6131 if (REAL_VALUE_ISINF (c2))
6133 /* sqrt(x) < y is always true, when y is a very large
6134 value and we don't care about NaNs or Infinities. */
6135 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6136 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6138 /* sqrt(x) < y is x != +Inf when y is very large and we
6139 don't care about NaNs. */
6140 if (! HONOR_NANS (mode))
6141 return fold_build2_loc (loc, NE_EXPR, type, arg,
6142 build_real (TREE_TYPE (arg), c2));
6144 /* sqrt(x) < y is x >= 0 when y is very large and we
6145 don't care about Infinities. */
6146 if (! HONOR_INFINITIES (mode))
6147 return fold_build2_loc (loc, GE_EXPR, type, arg,
6148 build_real (TREE_TYPE (arg), dconst0));
6150 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6151 if (lang_hooks.decls.global_bindings_p () != 0
6152 || CONTAINS_PLACEHOLDER_P (arg))
6155 arg = save_expr (arg);
6156 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6157 fold_build2_loc (loc, GE_EXPR, type, arg,
6158 build_real (TREE_TYPE (arg),
6160 fold_build2_loc (loc, NE_EXPR, type, arg,
6161 build_real (TREE_TYPE (arg),
6165 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6166 if (! HONOR_NANS (mode))
6167 return fold_build2_loc (loc, code, type, arg,
6168 build_real (TREE_TYPE (arg), c2));
6170 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6171 if (lang_hooks.decls.global_bindings_p () == 0
6172 && ! CONTAINS_PLACEHOLDER_P (arg))
6174 arg = save_expr (arg);
6175 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6176 fold_build2_loc (loc, GE_EXPR, type, arg,
6177 build_real (TREE_TYPE (arg),
6179 fold_build2_loc (loc, code, type, arg,
6180 build_real (TREE_TYPE (arg),
6189 /* Subroutine of fold() that optimizes comparisons against Infinities,
6190 either +Inf or -Inf.
6192 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6193 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6194 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6196 The function returns the constant folded tree if a simplification
6197 can be made, and NULL_TREE otherwise. */
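/* For example (illustrative only), with ARG1 = +Inf in double:
   x > +Inf folds to false (unless signaling NaNs must be honored),
   x >= +Inf and x == +Inf fold to x > DBL_MAX, x < +Inf folds to
   x <= DBL_MAX, and x != +Inf folds to the negation of x > DBL_MAX.
   A comparison against -Inf is handled by first swapping the sense
   of the comparison.  */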
6200 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6201 tree arg0, tree arg1)
6203 enum machine_mode mode;
6204 REAL_VALUE_TYPE max;
6208 mode = TYPE_MODE (TREE_TYPE (arg0));
6210 /* For negative infinity swap the sense of the comparison. */
6211 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6213 code = swap_tree_comparison (code);
6218 /* x > +Inf is always false, if we ignore sNaNs. */
6219 if (HONOR_SNANS (mode))
6221 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6224 /* x <= +Inf is always true, if we don't care about NaNs. */
6225 if (! HONOR_NANS (mode))
6226 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6228 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6229 if (lang_hooks.decls.global_bindings_p () == 0
6230 && ! CONTAINS_PLACEHOLDER_P (arg0))
6232 arg0 = save_expr (arg0);
6233 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6239 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6240 real_maxval (&max, neg, mode);
6241 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6242 arg0, build_real (TREE_TYPE (arg0), max));
6245 /* x < +Inf is always equal to x <= DBL_MAX. */
6246 real_maxval (&max, neg, mode);
6247 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6248 arg0, build_real (TREE_TYPE (arg0), max));
6251 /* x != +Inf is always equal to !(x > DBL_MAX). */
6252 real_maxval (&max, neg, mode);
6253 if (! HONOR_NANS (mode))
6254 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6255 arg0, build_real (TREE_TYPE (arg0), max));
6257 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6258 arg0, build_real (TREE_TYPE (arg0), max));
6259 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6268 /* Subroutine of fold() that optimizes comparisons of a division by
6269 a nonzero integer constant against an integer constant, i.e. X / C1 op C2.
6272 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6273 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6274 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6276 The function returns the constant folded tree if a simplification
6277 can be made, and NULL_TREE otherwise. */
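/* A concrete example of the range transformation performed below
   (illustrative, shown as source-level C): for unsigned X,

       X / 4 == 2   is rewritten as the range check   8 <= X && X <= 11
       X / 4 > 2    is rewritten as                    X > 11

   When LO or HI would overflow the type, the test degenerates into a
   constant or into a single bound comparison.  */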
6280 fold_div_compare (location_t loc,
6281 enum tree_code code, tree type, tree arg0, tree arg1)
6283 tree prod, tmp, hi, lo;
6284 tree arg00 = TREE_OPERAND (arg0, 0);
6285 tree arg01 = TREE_OPERAND (arg0, 1);
6287 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6291 /* We have to do this the hard way to detect unsigned overflow.
6292 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6293 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6294 TREE_INT_CST_HIGH (arg01),
6295 TREE_INT_CST_LOW (arg1),
6296 TREE_INT_CST_HIGH (arg1),
6297 &val.low, &val.high, unsigned_p);
6298 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6299 neg_overflow = false;
6303 tmp = int_const_binop (MINUS_EXPR, arg01,
6304 build_int_cst (TREE_TYPE (arg01), 1));
6307 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6308 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6309 TREE_INT_CST_HIGH (prod),
6310 TREE_INT_CST_LOW (tmp),
6311 TREE_INT_CST_HIGH (tmp),
6312 &val.low, &val.high, unsigned_p);
6313 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6314 -1, overflow | TREE_OVERFLOW (prod));
6316 else if (tree_int_cst_sgn (arg01) >= 0)
6318 tmp = int_const_binop (MINUS_EXPR, arg01,
6319 build_int_cst (TREE_TYPE (arg01), 1));
6320 switch (tree_int_cst_sgn (arg1))
6323 neg_overflow = true;
6324 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6329 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6334 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6344 /* A negative divisor reverses the relational operators. */
6345 code = swap_tree_comparison (code);
6347 tmp = int_const_binop (PLUS_EXPR, arg01,
6348 build_int_cst (TREE_TYPE (arg01), 1));
6349 switch (tree_int_cst_sgn (arg1))
6352 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6357 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6362 neg_overflow = true;
6363 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6375 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6376 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6377 if (TREE_OVERFLOW (hi))
6378 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6379 if (TREE_OVERFLOW (lo))
6380 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6381 return build_range_check (loc, type, arg00, 1, lo, hi);
6384 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6385 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6386 if (TREE_OVERFLOW (hi))
6387 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6388 if (TREE_OVERFLOW (lo))
6389 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6390 return build_range_check (loc, type, arg00, 0, lo, hi);
6393 if (TREE_OVERFLOW (lo))
6395 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6396 return omit_one_operand_loc (loc, type, tmp, arg00);
6398 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6401 if (TREE_OVERFLOW (hi))
6403 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6404 return omit_one_operand_loc (loc, type, tmp, arg00);
6406 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6409 if (TREE_OVERFLOW (hi))
6411 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6412 return omit_one_operand_loc (loc, type, tmp, arg00);
6414 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6417 if (TREE_OVERFLOW (lo))
6419 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6420 return omit_one_operand_loc (loc, type, tmp, arg00);
6422 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6432 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6433 equality/inequality test, then return a simplified form of the test
6434 using a sign test. Otherwise return NULL. TYPE is the desired result type. */
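/* For example (illustrative): if X has an 8-bit unsigned type, the test
   (X & 0x80) != 0 inspects only the sign bit of the corresponding signed
   type, so it can be rewritten as (signed char) X < 0, and
   (X & 0x80) == 0 as (signed char) X >= 0.  */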
6438 fold_single_bit_test_into_sign_test (location_t loc,
6439 enum tree_code code, tree arg0, tree arg1,
6442 /* If this is testing a single bit, we can optimize the test. */
6443 if ((code == NE_EXPR || code == EQ_EXPR)
6444 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6445 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6447 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6448 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6449 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6451 if (arg00 != NULL_TREE
6452 /* This is only a win if casting to a signed type is cheap,
6453 i.e. when arg00's type is not a partial mode. */
6454 && TYPE_PRECISION (TREE_TYPE (arg00))
6455 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6457 tree stype = signed_type_for (TREE_TYPE (arg00));
6458 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6460 fold_convert_loc (loc, stype, arg00),
6461 build_int_cst (stype, 0));
6468 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6469 equality/inequality test, then return a simplified form of
6470 the test using shifts and logical operations. Otherwise return
6471 NULL. TYPE is the desired result type. */
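/* For example (illustrative): (X & 8) != 0 becomes ((X >> 3) & 1) and
   (X & 8) == 0 becomes (((X >> 3) ^ 1) & 1), with the shift done in a
   signed or unsigned copy of X's type depending on how the target
   extends loads; the AND is emitted last so that it can combine with
   surrounding code.  */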
6474 fold_single_bit_test (location_t loc, enum tree_code code,
6475 tree arg0, tree arg1, tree result_type)
6477 /* If this is testing a single bit, we can optimize the test. */
6478 if ((code == NE_EXPR || code == EQ_EXPR)
6479 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6480 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6482 tree inner = TREE_OPERAND (arg0, 0);
6483 tree type = TREE_TYPE (arg0);
6484 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6485 enum machine_mode operand_mode = TYPE_MODE (type);
6487 tree signed_type, unsigned_type, intermediate_type;
6490 /* First, see if we can fold the single bit test into a sign-bit test. */
6492 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6497 /* Otherwise we have (A & C) != 0 where C is a single bit,
6498 convert that into ((A >> C2) & 1), where C2 = log2(C).
6499 Similarly for (A & C) == 0. */
6501 /* If INNER is a right shift by a constant and it plus BITNUM does
6502 not overflow, adjust BITNUM and INNER. */
6503 if (TREE_CODE (inner) == RSHIFT_EXPR
6504 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6505 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6506 && bitnum < TYPE_PRECISION (type)
6507 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6508 bitnum - TYPE_PRECISION (type)))
6510 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6511 inner = TREE_OPERAND (inner, 0);
6514 /* If we are going to be able to omit the AND below, we must do our
6515 operations as unsigned. If we must use the AND, we have a choice.
6516 Normally unsigned is faster, but for some machines signed is. */
6517 #ifdef LOAD_EXTEND_OP
6518 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6519 && !flag_syntax_only) ? 0 : 1;
6524 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6525 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6526 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6527 inner = fold_convert_loc (loc, intermediate_type, inner);
6530 inner = build2 (RSHIFT_EXPR, intermediate_type,
6531 inner, size_int (bitnum));
6533 one = build_int_cst (intermediate_type, 1);
6535 if (code == EQ_EXPR)
6536 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6538 /* Put the AND last so it can combine with more things. */
6539 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6541 /* Make sure to return the proper type. */
6542 inner = fold_convert_loc (loc, result_type, inner);
6549 /* Check whether we are allowed to reorder operands arg0 and arg1,
6550 such that the evaluation of arg1 occurs before arg0. */
6553 reorder_operands_p (const_tree arg0, const_tree arg1)
6555 if (! flag_evaluation_order)
6557 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6559 return ! TREE_SIDE_EFFECTS (arg0)
6560 && ! TREE_SIDE_EFFECTS (arg1);
6563 /* Test whether it is preferable to swap two operands, ARG0 and
6564 ARG1, for example because ARG0 is an integer constant and ARG1
6565 isn't. If REORDER is true, only recommend swapping if we can
6566 evaluate the operands in reverse order. */
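/* For example (illustrative): for 1 + x this predicate returns true,
   because constants canonically go last, so fold rewrites the sum as
   x + 1; for x + 1 it returns false.  Two SSA_NAMEs are ordered by
   their version numbers so that equivalent expressions reach a single
   canonical form.  */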
6569 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6571 STRIP_SIGN_NOPS (arg0);
6572 STRIP_SIGN_NOPS (arg1);
6574 if (TREE_CODE (arg1) == INTEGER_CST)
6576 if (TREE_CODE (arg0) == INTEGER_CST)
6579 if (TREE_CODE (arg1) == REAL_CST)
6581 if (TREE_CODE (arg0) == REAL_CST)
6584 if (TREE_CODE (arg1) == FIXED_CST)
6586 if (TREE_CODE (arg0) == FIXED_CST)
6589 if (TREE_CODE (arg1) == COMPLEX_CST)
6591 if (TREE_CODE (arg0) == COMPLEX_CST)
6594 if (TREE_CONSTANT (arg1))
6596 if (TREE_CONSTANT (arg0))
6599 if (optimize_function_for_size_p (cfun))
6602 if (reorder && flag_evaluation_order
6603 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6606 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6607 for commutative and comparison operators. Ensuring a canonical
6608 form allows the optimizers to find additional redundancies without
6609 having to explicitly check for both orderings. */
6610 if (TREE_CODE (arg0) == SSA_NAME
6611 && TREE_CODE (arg1) == SSA_NAME
6612 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6615 /* Put SSA_NAMEs last. */
6616 if (TREE_CODE (arg1) == SSA_NAME)
6618 if (TREE_CODE (arg0) == SSA_NAME)
6621 /* Put variables last. */
6630 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6631 ARG0 is extended to a wider type. */
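/* For example (illustrative): if C has type unsigned char, the
   comparison (int) C == 77 can be carried out in the narrower type as
   C == 77, whereas (int) C == 300 is known to be false because 300
   does not fit in unsigned char.  */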
6634 fold_widened_comparison (location_t loc, enum tree_code code,
6635 tree type, tree arg0, tree arg1)
6637 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6639 tree shorter_type, outer_type;
6643 if (arg0_unw == arg0)
6645 shorter_type = TREE_TYPE (arg0_unw);
6647 #ifdef HAVE_canonicalize_funcptr_for_compare
6648 /* Disable this optimization if we're casting a function pointer
6649 type on targets that require function pointer canonicalization. */
6650 if (HAVE_canonicalize_funcptr_for_compare
6651 && TREE_CODE (shorter_type) == POINTER_TYPE
6652 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6656 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6659 arg1_unw = get_unwidened (arg1, NULL_TREE);
6661 /* If possible, express the comparison in the shorter mode. */
6662 if ((code == EQ_EXPR || code == NE_EXPR
6663 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6664 && (TREE_TYPE (arg1_unw) == shorter_type
6665 || ((TYPE_PRECISION (shorter_type)
6666 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6667 && (TYPE_UNSIGNED (shorter_type)
6668 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6669 || (TREE_CODE (arg1_unw) == INTEGER_CST
6670 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6671 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6672 && int_fits_type_p (arg1_unw, shorter_type))))
6673 return fold_build2_loc (loc, code, type, arg0_unw,
6674 fold_convert_loc (loc, shorter_type, arg1_unw));
6676 if (TREE_CODE (arg1_unw) != INTEGER_CST
6677 || TREE_CODE (shorter_type) != INTEGER_TYPE
6678 || !int_fits_type_p (arg1_unw, shorter_type))
6681 /* If we are comparing with an integer that does not fit into the range
6682 of the shorter type, the result is known. */
6683 outer_type = TREE_TYPE (arg1_unw);
6684 min = lower_bound_in_type (outer_type, shorter_type);
6685 max = upper_bound_in_type (outer_type, shorter_type);
6687 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6689 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6696 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6701 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6707 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6709 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6714 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6716 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6725 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6726 ARG0 just the signedness is changed. */
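/* For example (illustrative): if I has type int, the test
   (unsigned int) I == 5U only changes the signedness of I, so it is
   rewritten as I == 5.  For ordering comparisons the signedness of the
   two types has to agree before the conversion can be dropped.  */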
6729 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6730 tree arg0, tree arg1)
6733 tree inner_type, outer_type;
6735 if (!CONVERT_EXPR_P (arg0))
6738 outer_type = TREE_TYPE (arg0);
6739 arg0_inner = TREE_OPERAND (arg0, 0);
6740 inner_type = TREE_TYPE (arg0_inner);
6742 #ifdef HAVE_canonicalize_funcptr_for_compare
6743 /* Disable this optimization if we're casting a function pointer
6744 type on targets that require function pointer canonicalization. */
6745 if (HAVE_canonicalize_funcptr_for_compare
6746 && TREE_CODE (inner_type) == POINTER_TYPE
6747 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6751 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6754 if (TREE_CODE (arg1) != INTEGER_CST
6755 && !(CONVERT_EXPR_P (arg1)
6756 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6759 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6760 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6765 if (TREE_CODE (arg1) == INTEGER_CST)
6766 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6767 0, TREE_OVERFLOW (arg1));
6769 arg1 = fold_convert_loc (loc, inner_type, arg1);
6771 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6774 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6775 the step of the array. Reconstructs s and delta in the case of s *
6776 delta being an integer constant (and thus already folded). ADDR is
6777 the address. OP1 is the multiplicative expression. If the
6778 function succeeds, the new address expression is returned.
6779 Otherwise NULL_TREE is returned. LOC is the location of the
6780 resulting expression. */
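/* For example (illustrative): given int A[10], the address computation
   &A[I] p+ 4 * D, where 4 is the element size, is rewritten as
   &A[I + D].  For a multi-dimensional array the fold additionally has
   to prove that the adjusted index stays within the dimension being
   modified.  */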
6783 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6785 tree s, delta, step;
6786 tree ref = TREE_OPERAND (addr, 0), pref;
6791 /* Strip the nops that might be added when converting op1 to sizetype. */
6794 /* Canonicalize op1 into a possibly non-constant delta
6795 and an INTEGER_CST s. */
6796 if (TREE_CODE (op1) == MULT_EXPR)
6798 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6803 if (TREE_CODE (arg0) == INTEGER_CST)
6808 else if (TREE_CODE (arg1) == INTEGER_CST)
6816 else if (TREE_CODE (op1) == INTEGER_CST)
6823 /* Treat op1 as delta * 1. */
6825 s = integer_one_node;
6828 for (;; ref = TREE_OPERAND (ref, 0))
6830 if (TREE_CODE (ref) == ARRAY_REF)
6834 /* Remember if this was a multi-dimensional array. */
6835 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6838 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6841 itype = TREE_TYPE (domain);
6843 step = array_ref_element_size (ref);
6844 if (TREE_CODE (step) != INTEGER_CST)
6849 if (! tree_int_cst_equal (step, s))
6854 /* See whether delta is a multiple of the step. */
6855 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6861 /* Only fold here if we can verify we do not overflow one
6862 dimension of a multi-dimensional array. */
6867 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6868 || !TYPE_MAX_VALUE (domain)
6869 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6872 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6873 fold_convert_loc (loc, itype,
6874 TREE_OPERAND (ref, 1)),
6875 fold_convert_loc (loc, itype, delta));
6877 || TREE_CODE (tmp) != INTEGER_CST
6878 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6887 if (!handled_component_p (ref))
6891 /* We found a suitable array reference. So copy everything up to it,
6892 and replace the index. */
6894 pref = TREE_OPERAND (addr, 0);
6895 ret = copy_node (pref);
6896 SET_EXPR_LOCATION (ret, loc);
6901 pref = TREE_OPERAND (pref, 0);
6902 TREE_OPERAND (pos, 0) = copy_node (pref);
6903 pos = TREE_OPERAND (pos, 0);
6906 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6907 fold_convert_loc (loc, itype,
6908 TREE_OPERAND (pos, 1)),
6909 fold_convert_loc (loc, itype, delta));
6911 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6915 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6916 means A >= Y && A != MAX, but in this case we know that
6917 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6920 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6922 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6924 if (TREE_CODE (bound) == LT_EXPR)
6925 a = TREE_OPERAND (bound, 0);
6926 else if (TREE_CODE (bound) == GT_EXPR)
6927 a = TREE_OPERAND (bound, 1);
6931 typea = TREE_TYPE (a);
6932 if (!INTEGRAL_TYPE_P (typea)
6933 && !POINTER_TYPE_P (typea))
6936 if (TREE_CODE (ineq) == LT_EXPR)
6938 a1 = TREE_OPERAND (ineq, 1);
6939 y = TREE_OPERAND (ineq, 0);
6941 else if (TREE_CODE (ineq) == GT_EXPR)
6943 a1 = TREE_OPERAND (ineq, 0);
6944 y = TREE_OPERAND (ineq, 1);
6949 if (TREE_TYPE (a1) != typea)
6952 if (POINTER_TYPE_P (typea))
6954 /* Convert the pointer types to integers before taking the difference. */
6955 tree ta = fold_convert_loc (loc, ssizetype, a);
6956 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6957 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6960 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6962 if (!diff || !integer_onep (diff))
6965 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6968 /* Fold a sum or difference of at least one multiplication.
6969 Returns the folded tree or NULL if no simplification could be made. */
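/* Illustrative examples of the patterns handled here: A*C + B*C is
   rewritten as (A + B) * C, A*C + A as A * (C + 1), and a sum such as
   X*8 + Y*4, whose constants share a power-of-two factor, as
   (X*2 + Y) * 4.  */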
6972 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6973 tree arg0, tree arg1)
6975 tree arg00, arg01, arg10, arg11;
6976 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6978 /* (A * C) +- (B * C) -> (A+-B) * C.
6979 (A * C) +- A -> A * (C+-1).
6980 We are most concerned about the case where C is a constant,
6981 but other combinations show up during loop reduction. Since
6982 it is not difficult, try all four possibilities. */
6984 if (TREE_CODE (arg0) == MULT_EXPR)
6986 arg00 = TREE_OPERAND (arg0, 0);
6987 arg01 = TREE_OPERAND (arg0, 1);
6989 else if (TREE_CODE (arg0) == INTEGER_CST)
6991 arg00 = build_one_cst (type);
6996 /* We cannot generate constant 1 for fract. */
6997 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7000 arg01 = build_one_cst (type);
7002 if (TREE_CODE (arg1) == MULT_EXPR)
7004 arg10 = TREE_OPERAND (arg1, 0);
7005 arg11 = TREE_OPERAND (arg1, 1);
7007 else if (TREE_CODE (arg1) == INTEGER_CST)
7009 arg10 = build_one_cst (type);
7010 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7011 the purpose of this canonicalization. */
7012 if (TREE_INT_CST_HIGH (arg1) == -1
7013 && negate_expr_p (arg1)
7014 && code == PLUS_EXPR)
7016 arg11 = negate_expr (arg1);
7024 /* We cannot generate constant 1 for fract. */
7025 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7028 arg11 = build_one_cst (type);
7032 if (operand_equal_p (arg01, arg11, 0))
7033 same = arg01, alt0 = arg00, alt1 = arg10;
7034 else if (operand_equal_p (arg00, arg10, 0))
7035 same = arg00, alt0 = arg01, alt1 = arg11;
7036 else if (operand_equal_p (arg00, arg11, 0))
7037 same = arg00, alt0 = arg01, alt1 = arg10;
7038 else if (operand_equal_p (arg01, arg10, 0))
7039 same = arg01, alt0 = arg00, alt1 = arg11;
7041 /* No identical multiplicands; see if we can find a common
7042 power-of-two factor in non-power-of-two multiplies. This
7043 can help in multi-dimensional array access. */
7044 else if (host_integerp (arg01, 0)
7045 && host_integerp (arg11, 0))
7047 HOST_WIDE_INT int01, int11, tmp;
7050 int01 = TREE_INT_CST_LOW (arg01);
7051 int11 = TREE_INT_CST_LOW (arg11);
7053 /* Move min of absolute values to int11. */
7054 if ((int01 >= 0 ? int01 : -int01)
7055 < (int11 >= 0 ? int11 : -int11))
7057 tmp = int01, int01 = int11, int11 = tmp;
7058 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7065 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7066 /* The remainder should not be a constant, otherwise we
7067 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7068 increase the number of multiplications required. */
7069 && TREE_CODE (arg10) != INTEGER_CST)
7071 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7072 build_int_cst (TREE_TYPE (arg00),
7077 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7082 return fold_build2_loc (loc, MULT_EXPR, type,
7083 fold_build2_loc (loc, code, type,
7084 fold_convert_loc (loc, type, alt0),
7085 fold_convert_loc (loc, type, alt1)),
7086 fold_convert_loc (loc, type, same));
7091 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7092 specified by EXPR into the buffer PTR of length LEN bytes.
7093 Return the number of bytes placed in the buffer, or zero upon failure. */
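/* For example (illustrative): encoding the 32-bit integer 0x01020304
   stores the bytes 04 03 02 01 into PTR on a little-endian target and
   01 02 03 04 on a big-endian one; the word and byte index arithmetic
   below lays the bytes out in the target's byte and word order.  */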
7097 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7099 tree type = TREE_TYPE (expr);
7100 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7101 int byte, offset, word, words;
7102 unsigned char value;
7104 if (total_bytes > len)
7106 words = total_bytes / UNITS_PER_WORD;
7108 for (byte = 0; byte < total_bytes; byte++)
7110 int bitpos = byte * BITS_PER_UNIT;
7111 if (bitpos < HOST_BITS_PER_WIDE_INT)
7112 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7114 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7115 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7117 if (total_bytes > UNITS_PER_WORD)
7119 word = byte / UNITS_PER_WORD;
7120 if (WORDS_BIG_ENDIAN)
7121 word = (words - 1) - word;
7122 offset = word * UNITS_PER_WORD;
7123 if (BYTES_BIG_ENDIAN)
7124 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7126 offset += byte % UNITS_PER_WORD;
7129 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7130 ptr[offset] = value;
7136 /* Subroutine of native_encode_expr. Encode the REAL_CST
7137 specified by EXPR into the buffer PTR of length LEN bytes.
7138 Return the number of bytes placed in the buffer, or zero upon failure. */
7142 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7144 tree type = TREE_TYPE (expr);
7145 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7146 int byte, offset, word, words, bitpos;
7147 unsigned char value;
7149 /* There are always 32 bits in each long, no matter the size of
7150 the host's long. We handle floating point representations with up to 192 bits. */
7154 if (total_bytes > len)
7156 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7158 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7160 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7161 bitpos += BITS_PER_UNIT)
7163 byte = (bitpos / BITS_PER_UNIT) & 3;
7164 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7166 if (UNITS_PER_WORD < 4)
7168 word = byte / UNITS_PER_WORD;
7169 if (WORDS_BIG_ENDIAN)
7170 word = (words - 1) - word;
7171 offset = word * UNITS_PER_WORD;
7172 if (BYTES_BIG_ENDIAN)
7173 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7175 offset += byte % UNITS_PER_WORD;
7178 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7179 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7184 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7185 specified by EXPR into the buffer PTR of length LEN bytes.
7186 Return the number of bytes placed in the buffer, or zero upon failure. */
7190 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7195 part = TREE_REALPART (expr);
7196 rsize = native_encode_expr (part, ptr, len);
7199 part = TREE_IMAGPART (expr);
7200 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7203 return rsize + isize;
7207 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7208 specified by EXPR into the buffer PTR of length LEN bytes.
7209 Return the number of bytes placed in the buffer, or zero upon failure. */
7213 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7215 int i, size, offset, count;
7216 tree itype, elem, elements;
7219 elements = TREE_VECTOR_CST_ELTS (expr);
7220 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7221 itype = TREE_TYPE (TREE_TYPE (expr));
7222 size = GET_MODE_SIZE (TYPE_MODE (itype));
7223 for (i = 0; i < count; i++)
7227 elem = TREE_VALUE (elements);
7228 elements = TREE_CHAIN (elements);
7235 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7240 if (offset + size > len)
7242 memset (ptr+offset, 0, size);
7250 /* Subroutine of native_encode_expr. Encode the STRING_CST
7251 specified by EXPR into the buffer PTR of length LEN bytes.
7252 Return the number of bytes placed in the buffer, or zero upon failure. */
7256 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7258 tree type = TREE_TYPE (expr);
7259 HOST_WIDE_INT total_bytes;
7261 if (TREE_CODE (type) != ARRAY_TYPE
7262 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7263 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7264 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7266 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7267 if (total_bytes > len)
7269 if (TREE_STRING_LENGTH (expr) < total_bytes)
7271 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7272 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7273 total_bytes - TREE_STRING_LENGTH (expr));
7276 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7281 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7282 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7283 buffer PTR of length LEN bytes. Return the number of bytes
7284 placed in the buffer, or zero upon failure. */
7287 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7289 switch (TREE_CODE (expr))
7292 return native_encode_int (expr, ptr, len);
7295 return native_encode_real (expr, ptr, len);
7298 return native_encode_complex (expr, ptr, len);
7301 return native_encode_vector (expr, ptr, len);
7304 return native_encode_string (expr, ptr, len);
7312 /* Subroutine of native_interpret_expr. Interpret the contents of
7313 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7314 If the buffer cannot be interpreted, return NULL_TREE. */
7317 native_interpret_int (tree type, const unsigned char *ptr, int len)
7319 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7320 int byte, offset, word, words;
7321 unsigned char value;
7324 if (total_bytes > len)
7326 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7329 result = double_int_zero;
7330 words = total_bytes / UNITS_PER_WORD;
7332 for (byte = 0; byte < total_bytes; byte++)
7334 int bitpos = byte * BITS_PER_UNIT;
7335 if (total_bytes > UNITS_PER_WORD)
7337 word = byte / UNITS_PER_WORD;
7338 if (WORDS_BIG_ENDIAN)
7339 word = (words - 1) - word;
7340 offset = word * UNITS_PER_WORD;
7341 if (BYTES_BIG_ENDIAN)
7342 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7344 offset += byte % UNITS_PER_WORD;
7347 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7348 value = ptr[offset];
7350 if (bitpos < HOST_BITS_PER_WIDE_INT)
7351 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7353 result.high |= (unsigned HOST_WIDE_INT) value
7354 << (bitpos - HOST_BITS_PER_WIDE_INT);
7357 return double_int_to_tree (type, result);
7361 /* Subroutine of native_interpret_expr. Interpret the contents of
7362 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7363 If the buffer cannot be interpreted, return NULL_TREE. */
7366 native_interpret_real (tree type, const unsigned char *ptr, int len)
7368 enum machine_mode mode = TYPE_MODE (type);
7369 int total_bytes = GET_MODE_SIZE (mode);
7370 int byte, offset, word, words, bitpos;
7371 unsigned char value;
7372 /* There are always 32 bits in each long, no matter the size of
7373 the host's long. We handle floating point representations with up to 192 bits. */
7378 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7379 if (total_bytes > len || total_bytes > 24)
7381 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7383 memset (tmp, 0, sizeof (tmp));
7384 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7385 bitpos += BITS_PER_UNIT)
7387 byte = (bitpos / BITS_PER_UNIT) & 3;
7388 if (UNITS_PER_WORD < 4)
7390 word = byte / UNITS_PER_WORD;
7391 if (WORDS_BIG_ENDIAN)
7392 word = (words - 1) - word;
7393 offset = word * UNITS_PER_WORD;
7394 if (BYTES_BIG_ENDIAN)
7395 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7397 offset += byte % UNITS_PER_WORD;
7400 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7401 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7403 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7406 real_from_target (&r, tmp, mode);
7407 return build_real (type, r);
7411 /* Subroutine of native_interpret_expr. Interpret the contents of
7412 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7413 If the buffer cannot be interpreted, return NULL_TREE. */
7416 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7418 tree etype, rpart, ipart;
7421 etype = TREE_TYPE (type);
7422 size = GET_MODE_SIZE (TYPE_MODE (etype));
7425 rpart = native_interpret_expr (etype, ptr, size);
7428 ipart = native_interpret_expr (etype, ptr+size, size);
7431 return build_complex (type, rpart, ipart);
7435 /* Subroutine of native_interpret_expr. Interpret the contents of
7436 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7437 If the buffer cannot be interpreted, return NULL_TREE. */
7440 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7442 tree etype, elem, elements;
7445 etype = TREE_TYPE (type);
7446 size = GET_MODE_SIZE (TYPE_MODE (etype));
7447 count = TYPE_VECTOR_SUBPARTS (type);
7448 if (size * count > len)
7451 elements = NULL_TREE;
7452 for (i = count - 1; i >= 0; i--)
7454 elem = native_interpret_expr (etype, ptr+(i*size), size);
7457 elements = tree_cons (NULL_TREE, elem, elements);
7459 return build_vector (type, elements);
7463 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7464 the buffer PTR of length LEN as a constant of type TYPE. For
7465 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7466 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7467 return NULL_TREE. */
7470 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7472 switch (TREE_CODE (type))
7477 return native_interpret_int (type, ptr, len);
7480 return native_interpret_real (type, ptr, len);
7483 return native_interpret_complex (type, ptr, len);
7486 return native_interpret_vector (type, ptr, len);
7494 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7495 TYPE at compile-time. If we're unable to perform the conversion
7496 return NULL_TREE. */
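/* For example (illustrative): VIEW_CONVERT_EXPR<int>(1.0f) is folded by
   encoding the float 1.0f into its target byte image (0x3f800000) and
   re-interpreting those bytes as an int, so on a typical 32-bit IEEE
   target the result is the INTEGER_CST 1065353216.  */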
7499 fold_view_convert_expr (tree type, tree expr)
7501 /* We support up to 512-bit values (for V8DFmode). */
7502 unsigned char buffer[64];
7505 /* Check that the host and target are sane. */
7506 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7509 len = native_encode_expr (expr, buffer, sizeof (buffer));
7513 return native_interpret_expr (type, buffer, len);
7516 /* Build an expression for the address of T. Folds away INDIRECT_REF
7517 to avoid confusing the gimplify process. */
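/* For example (illustrative): taking the address of *P yields P again,
   possibly with a cast to PTRTYPE, and the address of the zero-offset
   MEM_REF [P, 0] likewise folds back to P, instead of wrapping the
   dereference in a new ADDR_EXPR.  */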
7520 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7522 /* The size of the object is not relevant when talking about its address. */
7523 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7524 t = TREE_OPERAND (t, 0);
7526 if (TREE_CODE (t) == INDIRECT_REF)
7528 t = TREE_OPERAND (t, 0);
7530 if (TREE_TYPE (t) != ptrtype)
7531 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7533 else if (TREE_CODE (t) == MEM_REF
7534 && integer_zerop (TREE_OPERAND (t, 1)))
7535 return TREE_OPERAND (t, 0);
7536 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7538 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7540 if (TREE_TYPE (t) != ptrtype)
7541 t = fold_convert_loc (loc, ptrtype, t);
7544 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7549 /* Build an expression for the address of T. */
7552 build_fold_addr_expr_loc (location_t loc, tree t)
7554 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7556 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7559 /* Fold a unary expression of code CODE and type TYPE with operand
7560 OP0. Return the folded expression if folding is successful.
7561 Otherwise, return NULL_TREE. */
7564 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7568 enum tree_code_class kind = TREE_CODE_CLASS (code);
7570 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7571 && TREE_CODE_LENGTH (code) == 1);
7576 if (CONVERT_EXPR_CODE_P (code)
7577 || code == FLOAT_EXPR || code == ABS_EXPR)
7579 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
7581 STRIP_SIGN_NOPS (arg0);
7585 /* Strip any conversions that don't change the mode. This
7586 is safe for every expression, except for a comparison
7587 expression because its signedness is derived from its operands.
7590 Note that this is done as an internal manipulation within
7591 the constant folder, in order to find the simplest
7592 representation of the arguments so that their form can be
7593 studied. In any case, the appropriate type conversions
7594 should be put back in the tree that will get out of the constant folder. */
7600 if (TREE_CODE_CLASS (code) == tcc_unary)
7602 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7603 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7604 fold_build1_loc (loc, code, type,
7605 fold_convert_loc (loc, TREE_TYPE (op0),
7606 TREE_OPERAND (arg0, 1))));
7607 else if (TREE_CODE (arg0) == COND_EXPR)
7609 tree arg01 = TREE_OPERAND (arg0, 1);
7610 tree arg02 = TREE_OPERAND (arg0, 2);
7611 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7612 arg01 = fold_build1_loc (loc, code, type,
7613 fold_convert_loc (loc,
7614 TREE_TYPE (op0), arg01));
7615 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7616 arg02 = fold_build1_loc (loc, code, type,
7617 fold_convert_loc (loc,
7618 TREE_TYPE (op0), arg02));
7619 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7622 /* If this was a conversion, and all we did was to move it
7623 inside the COND_EXPR, bring it back out. But leave it if
7624 it is a conversion from integer to integer and the
7625 result precision is no wider than a word since such a
7626 conversion is cheap and may be optimized away by combine,
7627 while it couldn't if it were outside the COND_EXPR. Then return
7628 so we don't get into an infinite recursion loop taking the
7629 conversion out and then back in. */
7631 if ((CONVERT_EXPR_CODE_P (code)
7632 || code == NON_LVALUE_EXPR)
7633 && TREE_CODE (tem) == COND_EXPR
7634 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7635 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7636 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7637 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7638 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7639 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7640 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7642 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7643 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7644 || flag_syntax_only))
7645 tem = build1_loc (loc, code, type,
7647 TREE_TYPE (TREE_OPERAND
7648 (TREE_OPERAND (tem, 1), 0)),
7649 TREE_OPERAND (tem, 0),
7650 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7651 TREE_OPERAND (TREE_OPERAND (tem, 2),
7655 else if (COMPARISON_CLASS_P (arg0))
7657 if (TREE_CODE (type) == BOOLEAN_TYPE)
7659 arg0 = copy_node (arg0);
7660 TREE_TYPE (arg0) = type;
7663 else if (TREE_CODE (type) != INTEGER_TYPE)
7664 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7665 fold_build1_loc (loc, code, type,
7667 fold_build1_loc (loc, code, type,
7668 integer_zero_node));
7675 /* Re-association barriers around constants and other re-association
7676 barriers can be removed. */
7677 if (CONSTANT_CLASS_P (op0)
7678 || TREE_CODE (op0) == PAREN_EXPR)
7679 return fold_convert_loc (loc, type, op0);
7684 case FIX_TRUNC_EXPR:
7685 if (TREE_TYPE (op0) == type)
7688 /* If we have (type) (a CMP b) and type is an integral type, return a
7689 new expression involving the new type. */
7690 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7691 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7692 TREE_OPERAND (op0, 1));
7694 /* Handle cases of two conversions in a row. */
7695 if (CONVERT_EXPR_P (op0))
7697 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7698 tree inter_type = TREE_TYPE (op0);
7699 int inside_int = INTEGRAL_TYPE_P (inside_type);
7700 int inside_ptr = POINTER_TYPE_P (inside_type);
7701 int inside_float = FLOAT_TYPE_P (inside_type);
7702 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7703 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7704 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7705 int inter_int = INTEGRAL_TYPE_P (inter_type);
7706 int inter_ptr = POINTER_TYPE_P (inter_type);
7707 int inter_float = FLOAT_TYPE_P (inter_type);
7708 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7709 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7710 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7711 int final_int = INTEGRAL_TYPE_P (type);
7712 int final_ptr = POINTER_TYPE_P (type);
7713 int final_float = FLOAT_TYPE_P (type);
7714 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7715 unsigned int final_prec = TYPE_PRECISION (type);
7716 int final_unsignedp = TYPE_UNSIGNED (type);
7718 /* In addition to the cases of two conversions in a row
7719 handled below, if we are converting something to its own
7720 type via an object of identical or wider precision, neither
7721 conversion is needed. */
7722 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7723 && (((inter_int || inter_ptr) && final_int)
7724 || (inter_float && final_float))
7725 && inter_prec >= final_prec)
7726 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7728 /* Likewise, if the intermediate and initial types are either both
7729 float or both integer, we don't need the middle conversion if the
7730 former is wider than the latter and doesn't change the signedness
7731 (for integers). Avoid this if the final type is a pointer since
7732 then we sometimes need the middle conversion. Likewise if the
7733 final type has a precision not equal to the size of its mode. */
7734 if (((inter_int && inside_int)
7735 || (inter_float && inside_float)
7736 || (inter_vec && inside_vec))
7737 && inter_prec >= inside_prec
7738 && (inter_float || inter_vec
7739 || inter_unsignedp == inside_unsignedp)
7740 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7741 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7743 && (! final_vec || inter_prec == inside_prec))
7744 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7746 /* If we have a sign-extension of a zero-extended value, we can
7747 replace that by a single zero-extension. */
7748 if (inside_int && inter_int && final_int
7749 && inside_prec < inter_prec && inter_prec < final_prec
7750 && inside_unsignedp && !inter_unsignedp)
7751 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7753 /* Two conversions in a row are not needed unless:
7754 - some conversion is floating-point (overstrict for now), or
7755 - some conversion is a vector (overstrict for now), or
7756 - the intermediate type is narrower than both initial and final types, or
7758 - the intermediate type and innermost type differ in signedness,
7759 and the outermost type is wider than the intermediate, or
7760 - the initial type is a pointer type and the precisions of the
7761 intermediate and final types differ, or
7762 - the final type is a pointer type and the precisions of the
7763 initial and intermediate types differ. */
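/* An illustrative case that passes the checks implemented just below:
   (unsigned int) (int) s, where s has type unsigned short, changes only
   the signedness in the outer, equal-precision step, so the intermediate
   widening to int is dropped and the result is (unsigned int) s.  */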
7764 if (! inside_float && ! inter_float && ! final_float
7765 && ! inside_vec && ! inter_vec && ! final_vec
7766 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7767 && ! (inside_int && inter_int
7768 && inter_unsignedp != inside_unsignedp
7769 && inter_prec < final_prec)
7770 && ((inter_unsignedp && inter_prec > inside_prec)
7771 == (final_unsignedp && final_prec > inter_prec))
7772 && ! (inside_ptr && inter_prec != final_prec)
7773 && ! (final_ptr && inside_prec != inter_prec)
7774 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7775 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7776 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7779 /* Handle (T *)&A.B.C for A being of type T and B and C
7780 living at offset zero. This occurs frequently in
7781 C++ upcasting and then accessing the base. */
7782 if (TREE_CODE (op0) == ADDR_EXPR
7783 && POINTER_TYPE_P (type)
7784 && handled_component_p (TREE_OPERAND (op0, 0)))
7786 HOST_WIDE_INT bitsize, bitpos;
7788 enum machine_mode mode;
7789 int unsignedp, volatilep;
7790 tree base = TREE_OPERAND (op0, 0);
7791 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7792 &mode, &unsignedp, &volatilep, false);
7793 /* If the reference was to a (constant) zero offset, we can use
7794 the address of the base if it has the same base type
7795 as the result type and the pointer type is unqualified. */
7796 if (! offset && bitpos == 0
7797 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7798 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7799 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7800 return fold_convert_loc (loc, type,
7801 build_fold_addr_expr_loc (loc, base));
7804 if (TREE_CODE (op0) == MODIFY_EXPR
7805 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7806 /* Detect assigning a bitfield. */
7807 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7809 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7811 /* Don't leave an assignment inside a conversion
7812 unless assigning a bitfield. */
7813 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7814 /* First do the assignment, then return converted constant. */
7815 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7816 TREE_NO_WARNING (tem) = 1;
7817 TREE_USED (tem) = 1;
7821 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7822 constant (if x has signed type, the sign bit cannot be set
7823 in c). This folds extension into the BIT_AND_EXPR.
7824 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7825 very likely don't have maximal range for their precision and this
7826 transformation effectively doesn't preserve non-maximal ranges. */
7827 if (TREE_CODE (type) == INTEGER_TYPE
7828 && TREE_CODE (op0) == BIT_AND_EXPR
7829 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7831 tree and_expr = op0;
7832 tree and0 = TREE_OPERAND (and_expr, 0);
7833 tree and1 = TREE_OPERAND (and_expr, 1);
7836 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7837 || (TYPE_PRECISION (type)
7838 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7840 else if (TYPE_PRECISION (TREE_TYPE (and1))
7841 <= HOST_BITS_PER_WIDE_INT
7842 && host_integerp (and1, 1))
7844 unsigned HOST_WIDE_INT cst;
7846 cst = tree_low_cst (and1, 1);
7847 cst &= (HOST_WIDE_INT) -1
7848 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7849 change = (cst == 0);
7850 #ifdef LOAD_EXTEND_OP
7852 && !flag_syntax_only
7853 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7856 tree uns = unsigned_type_for (TREE_TYPE (and0));
7857 and0 = fold_convert_loc (loc, uns, and0);
7858 and1 = fold_convert_loc (loc, uns, and1);
7864 tem = force_fit_type_double (type, tree_to_double_int (and1),
7865 0, TREE_OVERFLOW (and1));
7866 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7867 fold_convert_loc (loc, type, and0), tem);
7871 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7872 when one of the new casts will fold away. Conservatively we assume
7873 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7874 if (POINTER_TYPE_P (type)
7875 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7876 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7877 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7878 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7880 tree arg00 = TREE_OPERAND (arg0, 0);
7881 tree arg01 = TREE_OPERAND (arg0, 1);
7883 return fold_build2_loc (loc,
7884 TREE_CODE (arg0), type,
7885 fold_convert_loc (loc, type, arg00),
7886 fold_convert_loc (loc, sizetype, arg01));
7889 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7890 of the same precision, and X has an integral type not narrower than
7891 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7892 if (INTEGRAL_TYPE_P (type)
7893 && TREE_CODE (op0) == BIT_NOT_EXPR
7894 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7895 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7896 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7898 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7899 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7900 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7901 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7902 fold_convert_loc (loc, type, tem));
7905 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7906 type of X and Y (integer types only). */
7907 if (INTEGRAL_TYPE_P (type)
7908 && TREE_CODE (op0) == MULT_EXPR
7909 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7910 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7912 /* Be careful not to introduce new overflows. */
7914 if (TYPE_OVERFLOW_WRAPS (type))
7917 mult_type = unsigned_type_for (type);
7919 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7921 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7922 fold_convert_loc (loc, mult_type,
7923 TREE_OPERAND (op0, 0)),
7924 fold_convert_loc (loc, mult_type,
7925 TREE_OPERAND (op0, 1)));
7926 return fold_convert_loc (loc, type, tem);
7930 tem = fold_convert_const (code, type, op0);
7931 return tem ? tem : NULL_TREE;
7933 case ADDR_SPACE_CONVERT_EXPR:
7934 if (integer_zerop (arg0))
7935 return fold_convert_const (code, type, arg0);
7938 case FIXED_CONVERT_EXPR:
7939 tem = fold_convert_const (code, type, arg0);
7940 return tem ? tem : NULL_TREE;
7942 case VIEW_CONVERT_EXPR:
7943 if (TREE_TYPE (op0) == type)
7945 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7946 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7947 type, TREE_OPERAND (op0, 0));
7948 if (TREE_CODE (op0) == MEM_REF)
7949 return fold_build2_loc (loc, MEM_REF, type,
7950 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7952 /* For integral conversions with the same precision or pointer
7953 conversions use a NOP_EXPR instead. */
7954 if ((INTEGRAL_TYPE_P (type)
7955 || POINTER_TYPE_P (type))
7956 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7957 || POINTER_TYPE_P (TREE_TYPE (op0)))
7958 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7959 return fold_convert_loc (loc, type, op0);
7961 /* Strip inner integral conversions that do not change the precision. */
7962 if (CONVERT_EXPR_P (op0)
7963 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7964 || POINTER_TYPE_P (TREE_TYPE (op0)))
7965 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7966 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7967 && (TYPE_PRECISION (TREE_TYPE (op0))
7968 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7969 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7970 type, TREE_OPERAND (op0, 0));
7972 return fold_view_convert_expr (type, op0);
7975 tem = fold_negate_expr (loc, arg0);
7977 return fold_convert_loc (loc, type, tem);
7981 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7982 return fold_abs_const (arg0, type);
7983 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7984 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7985 /* Convert fabs((double)float) into (double)fabsf(float). */
7986 else if (TREE_CODE (arg0) == NOP_EXPR
7987 && TREE_CODE (type) == REAL_TYPE)
7989 tree targ0 = strip_float_extensions (arg0);
7991 return fold_convert_loc (loc, type,
7992 fold_build1_loc (loc, ABS_EXPR,
7996 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7997 else if (TREE_CODE (arg0) == ABS_EXPR)
7999 else if (tree_expr_nonnegative_p (arg0))
8002 /* Strip sign ops from argument. */
8003 if (TREE_CODE (type) == REAL_TYPE)
8005 tem = fold_strip_sign_ops (arg0);
8007 return fold_build1_loc (loc, ABS_EXPR, type,
8008 fold_convert_loc (loc, type, tem));
8013 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8014 return fold_convert_loc (loc, type, arg0);
8015 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8017 tree itype = TREE_TYPE (type);
8018 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8019 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8020 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8021 negate_expr (ipart));
8023 if (TREE_CODE (arg0) == COMPLEX_CST)
8025 tree itype = TREE_TYPE (type);
8026 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8027 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8028 return build_complex (type, rpart, negate_expr (ipart));
8030 if (TREE_CODE (arg0) == CONJ_EXPR)
8031 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8035 if (TREE_CODE (arg0) == INTEGER_CST)
8036 return fold_not_const (arg0, type);
8037 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8038 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8039 /* Convert ~ (-A) to A - 1. */
8040 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8041 return fold_build2_loc (loc, MINUS_EXPR, type,
8042 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8043 build_int_cst (type, 1));
8044 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8045 else if (INTEGRAL_TYPE_P (type)
8046 && ((TREE_CODE (arg0) == MINUS_EXPR
8047 && integer_onep (TREE_OPERAND (arg0, 1)))
8048 || (TREE_CODE (arg0) == PLUS_EXPR
8049 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8050 return fold_build1_loc (loc, NEGATE_EXPR, type,
8051 fold_convert_loc (loc, type,
8052 TREE_OPERAND (arg0, 0)));
8053 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8054 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8055 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8056 fold_convert_loc (loc, type,
8057 TREE_OPERAND (arg0, 0)))))
8058 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8059 fold_convert_loc (loc, type,
8060 TREE_OPERAND (arg0, 1)));
8061 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8062 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8063 fold_convert_loc (loc, type,
8064 TREE_OPERAND (arg0, 1)))))
8065 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8066 fold_convert_loc (loc, type,
8067 TREE_OPERAND (arg0, 0)), tem);
8068 /* Perform BIT_NOT_EXPR on each element individually. */
8069 else if (TREE_CODE (arg0) == VECTOR_CST)
8071 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8072 int count = TYPE_VECTOR_SUBPARTS (type), i;
8074 for (i = 0; i < count; i++)
8078 elem = TREE_VALUE (elements);
8079 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8080 if (elem == NULL_TREE)
8082 elements = TREE_CHAIN (elements);
8085 elem = build_int_cst (TREE_TYPE (type), -1);
8086 list = tree_cons (NULL_TREE, elem, list);
8089 return build_vector (type, nreverse (list));
8094 case TRUTH_NOT_EXPR:
8095 /* The argument to invert_truthvalue must have Boolean type. */
8096 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8097 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8099 /* Note that the operand of this must be an int
8100 and its values must be 0 or 1.
8101 ("true" is a fixed value perhaps depending on the language,
8102 but we don't handle values other than 1 correctly yet.) */
8103 tem = fold_truth_not_expr (loc, arg0);
8106 return fold_convert_loc (loc, type, tem);
8109 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8110 return fold_convert_loc (loc, type, arg0);
8111 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8112 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8113 TREE_OPERAND (arg0, 1));
8114 if (TREE_CODE (arg0) == COMPLEX_CST)
8115 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8116 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8118 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8119 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8120 fold_build1_loc (loc, REALPART_EXPR, itype,
8121 TREE_OPERAND (arg0, 0)),
8122 fold_build1_loc (loc, REALPART_EXPR, itype,
8123 TREE_OPERAND (arg0, 1)));
8124 return fold_convert_loc (loc, type, tem);
8126 if (TREE_CODE (arg0) == CONJ_EXPR)
8128 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8129 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8130 TREE_OPERAND (arg0, 0));
8131 return fold_convert_loc (loc, type, tem);
8133 if (TREE_CODE (arg0) == CALL_EXPR)
8135 tree fn = get_callee_fndecl (arg0);
8136 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8137 switch (DECL_FUNCTION_CODE (fn))
8139 CASE_FLT_FN (BUILT_IN_CEXPI):
8140 fn = mathfn_built_in (type, BUILT_IN_COS);
8142 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8152 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8153 return build_zero_cst (type);
8154 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8155 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8156 TREE_OPERAND (arg0, 0));
8157 if (TREE_CODE (arg0) == COMPLEX_CST)
8158 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8159 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8161 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8162 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8163 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8164 TREE_OPERAND (arg0, 0)),
8165 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8166 TREE_OPERAND (arg0, 1)));
8167 return fold_convert_loc (loc, type, tem);
8169 if (TREE_CODE (arg0) == CONJ_EXPR)
8171 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8172 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8173 return fold_convert_loc (loc, type, negate_expr (tem));
8175 if (TREE_CODE (arg0) == CALL_EXPR)
8177 tree fn = get_callee_fndecl (arg0);
8178 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8179 switch (DECL_FUNCTION_CODE (fn))
8181 CASE_FLT_FN (BUILT_IN_CEXPI):
8182 fn = mathfn_built_in (type, BUILT_IN_SIN);
8184 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8194 /* Fold *&X to X if X is an lvalue. */
8195 if (TREE_CODE (op0) == ADDR_EXPR)
8197 tree op00 = TREE_OPERAND (op0, 0);
8198 if ((TREE_CODE (op00) == VAR_DECL
8199 || TREE_CODE (op00) == PARM_DECL
8200 || TREE_CODE (op00) == RESULT_DECL)
8201 && !TREE_READONLY (op00))
8208 } /* switch (code) */
8212 /* If the operation was a conversion, do _not_ mark a resulting constant
8213 with TREE_OVERFLOW if the original constant was not. These conversions
8214 have implementation-defined behavior and retaining the TREE_OVERFLOW
8215 flag here would confuse later passes such as VRP. */
8217 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8218 tree type, tree op0)
8220 tree res = fold_unary_loc (loc, code, type, op0);
8222 && TREE_CODE (res) == INTEGER_CST
8223 && TREE_CODE (op0) == INTEGER_CST
8224 && CONVERT_EXPR_CODE_P (code))
8225 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8230 /* Fold a binary expression of code CODE and type TYPE with operands
8231 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8232 Return the folded expression if folding is successful. Otherwise,
8233 return NULL_TREE. */
8236 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8238 enum tree_code compl_code;
8240 if (code == MIN_EXPR)
8241 compl_code = MAX_EXPR;
8242 else if (code == MAX_EXPR)
8243 compl_code = MIN_EXPR;
8247 /* MIN (MAX (a, b), b) == b. */
8248 if (TREE_CODE (op0) == compl_code
8249 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8250 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8252 /* MIN (MAX (b, a), b) == b. */
8253 if (TREE_CODE (op0) == compl_code
8254 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8255 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8256 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8258 /* MIN (a, MAX (a, b)) == a. */
8259 if (TREE_CODE (op1) == compl_code
8260 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8261 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8262 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8264 /* MIN (a, MAX (b, a)) == a. */
8265 if (TREE_CODE (op1) == compl_code
8266 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8267 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8268 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
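/* All four patterns reduce to the same fact: MAX (a, b) is never smaller
   than b, so e.g. MIN (MAX (a, b), b) is simply b.  The operand that drops
   out is handed to omit_one_operand_loc so that any side effects it
   contains are still preserved. */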
8273 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8274 by changing CODE to reduce the magnitude of constants involved in
8275 ARG0 of the comparison.
8276 Returns a canonicalized comparison tree if a simplification was
8277 possible, otherwise returns NULL_TREE.
8278 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8279 valid if signed overflow is undefined. */
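/* A sketch of the effect, assuming a signed type with undefined overflow:
   5 <= x is canonicalized to 4 < x, and x + 5 > y to x + 4 >= y, shrinking
   the magnitude of the constant as long as that keeps it inside the
   type's range. */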
8282 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8283 tree arg0, tree arg1,
8284 bool *strict_overflow_p)
8286 enum tree_code code0 = TREE_CODE (arg0);
8287 tree t, cst0 = NULL_TREE;
8291 /* Match A +- CST code arg1 and CST code arg1. We can change the
8292 first form only if overflow is undefined. */
8293 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8294 /* In principle pointers also have undefined overflow behavior,
8295 but that causes problems elsewhere. */
8296 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8297 && (code0 == MINUS_EXPR
8298 || code0 == PLUS_EXPR)
8299 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8300 || code0 == INTEGER_CST))
8303 /* Identify the constant in arg0 and its sign. */
8304 if (code0 == INTEGER_CST)
8307 cst0 = TREE_OPERAND (arg0, 1);
8308 sgn0 = tree_int_cst_sgn (cst0);
8310 /* Overflowed constants and zero will cause problems. */
8311 if (integer_zerop (cst0)
8312 || TREE_OVERFLOW (cst0))
8315 /* See if we can reduce the magnitude of the constant in
8316 arg0 by changing the comparison code. */
8317 if (code0 == INTEGER_CST)
8319 /* CST <= arg1 -> CST-1 < arg1. */
8320 if (code == LE_EXPR && sgn0 == 1)
8322 /* -CST < arg1 -> -CST-1 <= arg1. */
8323 else if (code == LT_EXPR && sgn0 == -1)
8325 /* CST > arg1 -> CST-1 >= arg1. */
8326 else if (code == GT_EXPR && sgn0 == 1)
8328 /* -CST >= arg1 -> -CST-1 > arg1. */
8329 else if (code == GE_EXPR && sgn0 == -1)
8333 /* arg1 code' CST' might be more canonical. */
8338 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8340 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8342 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8343 else if (code == GT_EXPR
8344 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8346 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8347 else if (code == LE_EXPR
8348 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8350 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8351 else if (code == GE_EXPR
8352 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8356 *strict_overflow_p = true;
8359 /* Now build the constant reduced in magnitude. But not if that
8360 would produce one outside of its type's range. */
8361 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8363 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8364 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8366 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8367 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8368 /* We cannot swap the comparison here as that would cause us to
8369 endlessly recurse. */
8372 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8373 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8374 if (code0 != INTEGER_CST)
8375 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8377 /* If swapping might yield a more canonical form, do so. */
8379 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8381 return fold_build2_loc (loc, code, type, t, arg1);
8384 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8385 overflow further. Try to decrease the magnitude of constants involved
8386 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8387 and put sole constants at the second argument position.
8388 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8391 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8392 tree arg0, tree arg1)
8395 bool strict_overflow_p;
8396 const char * const warnmsg = G_("assuming signed overflow does not occur "
8397 "when reducing constant in comparison");
8399 /* Try canonicalization by simplifying arg0. */
8400 strict_overflow_p = false;
8401 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8402 &strict_overflow_p);
8405 if (strict_overflow_p)
8406 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8410 /* Try canonicalization by simplifying arg1 using the swapped comparison. */
8412 code = swap_tree_comparison (code);
8413 strict_overflow_p = false;
8414 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8415 &strict_overflow_p);
8416 if (t && strict_overflow_p)
8417 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8421 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8422 space. This is used to avoid issuing overflow warnings for
8423 expressions like &p->x which cannot wrap. */
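/* Sketch of the check below: OFFSET + BITPOS / BITS_PER_UNIT is compared
   against the size of the pointed-to object; while the sum stays within
   that size the address cannot wrap, otherwise wraparound is conservatively
   assumed to be possible. */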
8426 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8428 unsigned HOST_WIDE_INT offset_low, total_low;
8429 HOST_WIDE_INT size, offset_high, total_high;
8431 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8437 if (offset == NULL_TREE)
8442 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8446 offset_low = TREE_INT_CST_LOW (offset);
8447 offset_high = TREE_INT_CST_HIGH (offset);
8450 if (add_double_with_sign (offset_low, offset_high,
8451 bitpos / BITS_PER_UNIT, 0,
8452 &total_low, &total_high,
8456 if (total_high != 0)
8459 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8463 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an array. */
8465 if (TREE_CODE (base) == ADDR_EXPR)
8467 HOST_WIDE_INT base_size;
8469 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8470 if (base_size > 0 && size < base_size)
8474 return total_low > (unsigned HOST_WIDE_INT) size;
8477 /* Subroutine of fold_binary. This routine performs all of the
8478 transformations that are common to the equality/inequality
8479 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8480 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8481 fold_binary should call fold_binary. Fold a comparison with
8482 tree code CODE and type TYPE with operands OP0 and OP1. Return
8483 the folded comparison or NULL_TREE. */
8486 fold_comparison (location_t loc, enum tree_code code, tree type,
8489 tree arg0, arg1, tem;
8494 STRIP_SIGN_NOPS (arg0);
8495 STRIP_SIGN_NOPS (arg1);
8497 tem = fold_relational_const (code, type, arg0, arg1);
8498 if (tem != NULL_TREE)
8501 /* If one arg is a real or integer constant, put it last. */
8502 if (tree_swap_operands_p (arg0, arg1, true))
8503 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8505 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
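/* For example, assuming signed arithmetic with undefined overflow,
   x + 3 < 10 becomes x < 7 and x - 100 == 5 becomes x == 105. */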
8506 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8507 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8508 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8509 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8510 && (TREE_CODE (arg1) == INTEGER_CST
8511 && !TREE_OVERFLOW (arg1)))
8513 tree const1 = TREE_OPERAND (arg0, 1);
8515 tree variable = TREE_OPERAND (arg0, 0);
8518 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8520 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8521 TREE_TYPE (arg1), const2, const1);
8523 /* If the constant operation overflowed this can be
8524 simplified as a comparison against INT_MAX/INT_MIN. */
8525 if (TREE_CODE (lhs) == INTEGER_CST
8526 && TREE_OVERFLOW (lhs))
8528 int const1_sgn = tree_int_cst_sgn (const1);
8529 enum tree_code code2 = code;
8531 /* Get the sign of the constant on the lhs if the
8532 operation were VARIABLE + CONST1. */
8533 if (TREE_CODE (arg0) == MINUS_EXPR)
8534 const1_sgn = -const1_sgn;
8536 /* The sign of the constant determines if we overflowed
8537 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8538 Canonicalize to the INT_MIN overflow by swapping the comparison if necessary. */
8540 if (const1_sgn == -1)
8541 code2 = swap_tree_comparison (code);
8543 /* We now can look at the canonicalized case
8544 VARIABLE + 1 CODE2 INT_MIN
8545 and decide on the result. */
8546 if (code2 == LT_EXPR
8548 || code2 == EQ_EXPR)
8549 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8550 else if (code2 == NE_EXPR
8552 || code2 == GT_EXPR)
8553 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8556 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8557 && (TREE_CODE (lhs) != INTEGER_CST
8558 || !TREE_OVERFLOW (lhs)))
8560 if (code != EQ_EXPR && code != NE_EXPR)
8561 fold_overflow_warning ("assuming signed overflow does not occur "
8562 "when changing X +- C1 cmp C2 to "
8564 WARN_STRICT_OVERFLOW_COMPARISON);
8565 return fold_build2_loc (loc, code, type, variable, lhs);
8569 /* For comparisons of pointers we can decompose it to a compile time
8570 comparison of the base objects and the offsets into the object.
8571 This requires at least one operand being an ADDR_EXPR or a
8572 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8573 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8574 && (TREE_CODE (arg0) == ADDR_EXPR
8575 || TREE_CODE (arg1) == ADDR_EXPR
8576 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8577 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8579 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8580 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8581 enum machine_mode mode;
8582 int volatilep, unsignedp;
8583 bool indirect_base0 = false, indirect_base1 = false;
8585 /* Get base and offset for the access. Strip ADDR_EXPR for
8586 get_inner_reference, but put it back by stripping INDIRECT_REF
8587 off the base object if possible. indirect_baseN will be true
8588 if baseN is not an address but refers to the object itself. */
8590 if (TREE_CODE (arg0) == ADDR_EXPR)
8592 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8593 &bitsize, &bitpos0, &offset0, &mode,
8594 &unsignedp, &volatilep, false);
8595 if (TREE_CODE (base0) == INDIRECT_REF)
8596 base0 = TREE_OPERAND (base0, 0);
8598 indirect_base0 = true;
8600 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8602 base0 = TREE_OPERAND (arg0, 0);
8603 STRIP_SIGN_NOPS (base0);
8604 if (TREE_CODE (base0) == ADDR_EXPR)
8606 base0 = TREE_OPERAND (base0, 0);
8607 indirect_base0 = true;
8609 offset0 = TREE_OPERAND (arg0, 1);
8613 if (TREE_CODE (arg1) == ADDR_EXPR)
8615 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8616 &bitsize, &bitpos1, &offset1, &mode,
8617 &unsignedp, &volatilep, false);
8618 if (TREE_CODE (base1) == INDIRECT_REF)
8619 base1 = TREE_OPERAND (base1, 0);
8621 indirect_base1 = true;
8623 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8625 base1 = TREE_OPERAND (arg1, 0);
8626 STRIP_SIGN_NOPS (base1);
8627 if (TREE_CODE (base1) == ADDR_EXPR)
8629 base1 = TREE_OPERAND (base1, 0);
8630 indirect_base1 = true;
8632 offset1 = TREE_OPERAND (arg1, 1);
8635 /* A local variable can never be pointed to by
8636 the default SSA name of an incoming parameter. */
8637 if ((TREE_CODE (arg0) == ADDR_EXPR
8639 && TREE_CODE (base0) == VAR_DECL
8640 && auto_var_in_fn_p (base0, current_function_decl)
8642 && TREE_CODE (base1) == SSA_NAME
8643 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8644 && SSA_NAME_IS_DEFAULT_DEF (base1))
8645 || (TREE_CODE (arg1) == ADDR_EXPR
8647 && TREE_CODE (base1) == VAR_DECL
8648 && auto_var_in_fn_p (base1, current_function_decl)
8650 && TREE_CODE (base0) == SSA_NAME
8651 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8652 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8654 if (code == NE_EXPR)
8655 return constant_boolean_node (1, type);
8656 else if (code == EQ_EXPR)
8657 return constant_boolean_node (0, type);
8659 /* If we have equivalent bases we might be able to simplify. */
8660 else if (indirect_base0 == indirect_base1
8661 && operand_equal_p (base0, base1, 0))
8663 /* We can fold this expression to a constant if the non-constant
8664 offset parts are equal. */
8665 if ((offset0 == offset1
8666 || (offset0 && offset1
8667 && operand_equal_p (offset0, offset1, 0)))
8670 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8675 && bitpos0 != bitpos1
8676 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8677 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8678 fold_overflow_warning (("assuming pointer wraparound does not "
8679 "occur when comparing P +- C1 with "
8681 WARN_STRICT_OVERFLOW_CONDITIONAL);
8686 return constant_boolean_node (bitpos0 == bitpos1, type);
8688 return constant_boolean_node (bitpos0 != bitpos1, type);
8690 return constant_boolean_node (bitpos0 < bitpos1, type);
8692 return constant_boolean_node (bitpos0 <= bitpos1, type);
8694 return constant_boolean_node (bitpos0 >= bitpos1, type);
8696 return constant_boolean_node (bitpos0 > bitpos1, type);
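/* Example: with int a[10], the operands of &a[2] < &a[5] share a base and
   have constant offsets only, so the bit positions above decide the result
   and (with pointer overflow treated as undefined) it folds to true. */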
8700 /* We can simplify the comparison to a comparison of the variable
8701 offset parts if the constant offset parts are equal.
8702 Be careful to use signed size type here because otherwise we
8703 mess with array offsets in the wrong way. This is possible
8704 because pointer arithmetic is restricted to remain within an
8705 object and overflow on pointer differences is undefined as of
8706 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8707 else if (bitpos0 == bitpos1
8708 && ((code == EQ_EXPR || code == NE_EXPR)
8709 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8711 /* By converting to signed size type we cover middle-end pointer
8712 arithmetic which operates on unsigned pointer types of size
8713 type size and ARRAY_REF offsets which are properly sign or
8714 zero extended from their type in case it is narrower than sizetype. */
8716 if (offset0 == NULL_TREE)
8717 offset0 = build_int_cst (ssizetype, 0);
8719 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8720 if (offset1 == NULL_TREE)
8721 offset1 = build_int_cst (ssizetype, 0);
8723 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8727 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8728 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8729 fold_overflow_warning (("assuming pointer wraparound does not "
8730 "occur when comparing P +- C1 with "
8732 WARN_STRICT_OVERFLOW_COMPARISON);
8734 return fold_build2_loc (loc, code, type, offset0, offset1);
8737 /* For non-equal bases we can simplify if they are addresses
8738 of local binding decls or constants. */
8739 else if (indirect_base0 && indirect_base1
8740 /* We know that !operand_equal_p (base0, base1, 0)
8741 because the if condition was false. But make
8742 sure two decls are not the same. */
8744 && TREE_CODE (arg0) == ADDR_EXPR
8745 && TREE_CODE (arg1) == ADDR_EXPR
8746 && (((TREE_CODE (base0) == VAR_DECL
8747 || TREE_CODE (base0) == PARM_DECL)
8748 && (targetm.binds_local_p (base0)
8749 || CONSTANT_CLASS_P (base1)))
8750 || CONSTANT_CLASS_P (base0))
8751 && (((TREE_CODE (base1) == VAR_DECL
8752 || TREE_CODE (base1) == PARM_DECL)
8753 && (targetm.binds_local_p (base1)
8754 || CONSTANT_CLASS_P (base0)))
8755 || CONSTANT_CLASS_P (base1)))
8757 if (code == EQ_EXPR)
8758 return omit_two_operands_loc (loc, type, boolean_false_node,
8760 else if (code == NE_EXPR)
8761 return omit_two_operands_loc (loc, type, boolean_true_node,
8764 /* For equal offsets we can simplify to a comparison of the addresses. */
8766 else if (bitpos0 == bitpos1
8768 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8770 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8771 && ((offset0 == offset1)
8772 || (offset0 && offset1
8773 && operand_equal_p (offset0, offset1, 0))))
8776 base0 = build_fold_addr_expr_loc (loc, base0);
8778 base1 = build_fold_addr_expr_loc (loc, base1);
8779 return fold_build2_loc (loc, code, type, base0, base1);
8783 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8784 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8785 the resulting offset is smaller in absolute value than the original one. */
8787 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8788 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8789 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8790 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8791 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8792 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8793 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8795 tree const1 = TREE_OPERAND (arg0, 1);
8796 tree const2 = TREE_OPERAND (arg1, 1);
8797 tree variable1 = TREE_OPERAND (arg0, 0);
8798 tree variable2 = TREE_OPERAND (arg1, 0);
8800 const char * const warnmsg = G_("assuming signed overflow does not "
8801 "occur when combining constants around "
8804 /* Put the constant on the side where it doesn't overflow and is
8805 of lower absolute value than before. */
8806 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8807 ? MINUS_EXPR : PLUS_EXPR,
8809 if (!TREE_OVERFLOW (cst)
8810 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8812 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8813 return fold_build2_loc (loc, code, type,
8815 fold_build2_loc (loc,
8816 TREE_CODE (arg1), TREE_TYPE (arg1),
8820 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8821 ? MINUS_EXPR : PLUS_EXPR,
8823 if (!TREE_OVERFLOW (cst)
8824 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8826 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8827 return fold_build2_loc (loc, code, type,
8828 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8834 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8835 signed arithmetic case. That form is created by the compiler
8836 often enough for folding it to be of value. One example is in
8837 computing loop trip counts after Operator Strength Reduction. */
8838 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8839 && TREE_CODE (arg0) == MULT_EXPR
8840 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8841 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8842 && integer_zerop (arg1))
8844 tree const1 = TREE_OPERAND (arg0, 1);
8845 tree const2 = arg1; /* zero */
8846 tree variable1 = TREE_OPERAND (arg0, 0);
8847 enum tree_code cmp_code = code;
8849 /* Handle unfolded multiplication by zero. */
8850 if (integer_zerop (const1))
8851 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8853 fold_overflow_warning (("assuming signed overflow does not occur when "
8854 "eliminating multiplication in comparison "
8856 WARN_STRICT_OVERFLOW_COMPARISON);
8858 /* If const1 is negative we swap the sense of the comparison. */
8859 if (tree_int_cst_sgn (const1) < 0)
8860 cmp_code = swap_tree_comparison (cmp_code);
8862 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
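/* E.g., with undefined signed overflow, x * 4 > 0 folds to x > 0, while
   x * -2 > 0 folds to x < 0 because the negative multiplier flips the
   comparison. */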
8865 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
8869 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8871 tree targ0 = strip_float_extensions (arg0);
8872 tree targ1 = strip_float_extensions (arg1);
8873 tree newtype = TREE_TYPE (targ0);
8875 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8876 newtype = TREE_TYPE (targ1);
8878 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8879 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8880 return fold_build2_loc (loc, code, type,
8881 fold_convert_loc (loc, newtype, targ0),
8882 fold_convert_loc (loc, newtype, targ1));
8884 /* (-a) CMP (-b) -> b CMP a */
8885 if (TREE_CODE (arg0) == NEGATE_EXPR
8886 && TREE_CODE (arg1) == NEGATE_EXPR)
8887 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8888 TREE_OPERAND (arg0, 0));
8890 if (TREE_CODE (arg1) == REAL_CST)
8892 REAL_VALUE_TYPE cst;
8893 cst = TREE_REAL_CST (arg1);
8895 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8896 if (TREE_CODE (arg0) == NEGATE_EXPR)
8897 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8898 TREE_OPERAND (arg0, 0),
8899 build_real (TREE_TYPE (arg1),
8900 real_value_negate (&cst)));
8902 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8903 /* a CMP (-0) -> a CMP 0 */
8904 if (REAL_VALUE_MINUS_ZERO (cst))
8905 return fold_build2_loc (loc, code, type, arg0,
8906 build_real (TREE_TYPE (arg1), dconst0));
8908 /* x != NaN is always true, other ops are always false. */
8909 if (REAL_VALUE_ISNAN (cst)
8910 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8912 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8913 return omit_one_operand_loc (loc, type, tem, arg0);
8916 /* Fold comparisons against infinity. */
8917 if (REAL_VALUE_ISINF (cst)
8918 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
8920 tem = fold_inf_compare (loc, code, type, arg0, arg1);
8921 if (tem != NULL_TREE)
8926 /* If this is a comparison of a real constant with a PLUS_EXPR
8927 or a MINUS_EXPR of a real constant, we can convert it into a
8928 comparison with a revised real constant as long as no overflow
8929 occurs when unsafe_math_optimizations are enabled. */
8930 if (flag_unsafe_math_optimizations
8931 && TREE_CODE (arg1) == REAL_CST
8932 && (TREE_CODE (arg0) == PLUS_EXPR
8933 || TREE_CODE (arg0) == MINUS_EXPR)
8934 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8935 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8936 ? MINUS_EXPR : PLUS_EXPR,
8937 arg1, TREE_OPERAND (arg0, 1)))
8938 && !TREE_OVERFLOW (tem))
8939 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8941 /* Likewise, we can simplify a comparison of a real constant with
8942 a MINUS_EXPR whose first operand is also a real constant, i.e.
8943 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8944 floating-point types only if -fassociative-math is set. */
8945 if (flag_associative_math
8946 && TREE_CODE (arg1) == REAL_CST
8947 && TREE_CODE (arg0) == MINUS_EXPR
8948 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8949 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8951 && !TREE_OVERFLOW (tem))
8952 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8953 TREE_OPERAND (arg0, 1), tem);
8955 /* Fold comparisons against built-in math functions. */
8956 if (TREE_CODE (arg1) == REAL_CST
8957 && flag_unsafe_math_optimizations
8958 && ! flag_errno_math)
8960 enum built_in_function fcode = builtin_mathfn_code (arg0);
8962 if (fcode != END_BUILTINS)
8964 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
8965 if (tem != NULL_TREE)
8971 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8972 && CONVERT_EXPR_P (arg0))
8974 /* If we are widening one operand of an integer comparison,
8975 see if the other operand is similarly being widened. Perhaps we
8976 can do the comparison in the narrower type. */
8977 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
8981 /* Or if we are changing signedness. */
8982 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
8987 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8988 constant, we can simplify it. */
8989 if (TREE_CODE (arg1) == INTEGER_CST
8990 && (TREE_CODE (arg0) == MIN_EXPR
8991 || TREE_CODE (arg0) == MAX_EXPR)
8992 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8994 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8999 /* Simplify comparison of something with itself. (For IEEE
9000 floating-point, we can only do some of these simplifications.) */
9001 if (operand_equal_p (arg0, arg1, 0))
9006 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9007 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9008 return constant_boolean_node (1, type);
9013 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9014 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9015 return constant_boolean_node (1, type);
9016 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9019 /* For NE, we can only do this simplification if integer
9020 or we don't honor IEEE floating point NaNs. */
9021 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9022 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9024 /* ... fall through ... */
9027 return constant_boolean_node (0, type);
9033 /* If we are comparing an expression that just has comparisons
9034 of two integer values, arithmetic expressions of those comparisons,
9035 and constants, we can simplify it. There are only three cases
9036 to check: the two values can either be equal, the first can be
9037 greater, or the second can be greater. Fold the expression for
9038 those three values. Since each value must be 0 or 1, we have
9039 eight possibilities, each of which corresponds to the constant 0
9040 or 1 or one of the six possible comparisons.
9042 This handles common cases like (a > b) == 0 but also handles
9043 expressions like ((x > y) - (y > x)) > 0, which supposedly
9044 occur in macroized code. */
9046 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9048 tree cval1 = 0, cval2 = 0;
9051 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9052 /* Don't handle degenerate cases here; they should already
9053 have been handled anyway. */
9054 && cval1 != 0 && cval2 != 0
9055 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9056 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9057 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9058 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9059 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9060 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9061 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9063 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9064 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9066 /* We can't just pass T to eval_subst in case cval1 or cval2
9067 was the same as ARG1. */
9070 = fold_build2_loc (loc, code, type,
9071 eval_subst (loc, arg0, cval1, maxval,
9075 = fold_build2_loc (loc, code, type,
9076 eval_subst (loc, arg0, cval1, maxval,
9080 = fold_build2_loc (loc, code, type,
9081 eval_subst (loc, arg0, cval1, minval,
9085 /* All three of these results should be 0 or 1. Confirm they are.
9086 Then use those values to select the proper code to use. */
9088 if (TREE_CODE (high_result) == INTEGER_CST
9089 && TREE_CODE (equal_result) == INTEGER_CST
9090 && TREE_CODE (low_result) == INTEGER_CST)
9092 /* Make a 3-bit mask with the high-order bit being the
9093 value for `>', the next for '=', and the low for '<'. */
9094 switch ((integer_onep (high_result) * 4)
9095 + (integer_onep (equal_result) * 2)
9096 + integer_onep (low_result))
9100 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9121 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9126 tem = save_expr (build2 (code, type, cval1, cval2));
9127 SET_EXPR_LOCATION (tem, loc);
9130 return fold_build2_loc (loc, code, type, cval1, cval2);
9135 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9136 into a single range test. */
9137 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9138 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9139 && TREE_CODE (arg1) == INTEGER_CST
9140 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9141 && !integer_zerop (TREE_OPERAND (arg0, 1))
9142 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9143 && !TREE_OVERFLOW (arg1))
9145 tem = fold_div_compare (loc, code, type, arg0, arg1);
9146 if (tem != NULL_TREE)
9150 /* Fold ~X op ~Y as Y op X. */
9151 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9152 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9154 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9155 return fold_build2_loc (loc, code, type,
9156 fold_convert_loc (loc, cmp_type,
9157 TREE_OPERAND (arg1, 0)),
9158 TREE_OPERAND (arg0, 0));
9161 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9162 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9163 && TREE_CODE (arg1) == INTEGER_CST)
9165 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9166 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9167 TREE_OPERAND (arg0, 0),
9168 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9169 fold_convert_loc (loc, cmp_type, arg1)));
9176 /* Subroutine of fold_binary. Optimize complex multiplications of the
9177 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9178 argument EXPR represents the expression "z" of type TYPE. */
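/* Worked example: for z = a + b*i we have conj(z) = a - b*i, so
   z * conj(z) = a*a + b*b with a zero imaginary part, which is exactly the
   COMPLEX_EXPR built below. */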
9181 fold_mult_zconjz (location_t loc, tree type, tree expr)
9183 tree itype = TREE_TYPE (type);
9184 tree rpart, ipart, tem;
9186 if (TREE_CODE (expr) == COMPLEX_EXPR)
9188 rpart = TREE_OPERAND (expr, 0);
9189 ipart = TREE_OPERAND (expr, 1);
9191 else if (TREE_CODE (expr) == COMPLEX_CST)
9193 rpart = TREE_REALPART (expr);
9194 ipart = TREE_IMAGPART (expr);
9198 expr = save_expr (expr);
9199 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9200 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9203 rpart = save_expr (rpart);
9204 ipart = save_expr (ipart);
9205 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9206 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9207 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9208 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9209 build_zero_cst (itype));
9213 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9214 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9215 guarantees that P and N have the same least significant log2(M) bits.
9216 N is not otherwise constrained. In particular, N is not normalized to
9217 0 <= N < M as is common. In general, the precise value of P is unknown.
9218 M is chosen as large as possible such that constant N can be determined.
9220 Returns M and sets *RESIDUE to N.
9222 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9223 account. This is not always possible due to PR 35705. */
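/* Example (a sketch): for &buf + 4 * i + 2 with buf 16-byte aligned, the
   ADDR_EXPR contributes modulus 16 and residue 0, the multiplication by 4
   lowers the modulus to 4, and the constant 2 sets the residue, so the
   pointer value is known to be congruent to 2 modulo 4. */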
9226 static unsigned HOST_WIDE_INT
9227 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9228 bool allow_func_align)
9230 enum tree_code code;
9234 code = TREE_CODE (expr);
9235 if (code == ADDR_EXPR)
9237 unsigned int bitalign;
9238 bitalign = get_object_alignment_1 (TREE_OPERAND (expr, 0), residue);
9239 *residue /= BITS_PER_UNIT;
9240 return bitalign / BITS_PER_UNIT;
9242 else if (code == POINTER_PLUS_EXPR)
9245 unsigned HOST_WIDE_INT modulus;
9246 enum tree_code inner_code;
9248 op0 = TREE_OPERAND (expr, 0);
9250 modulus = get_pointer_modulus_and_residue (op0, residue,
9253 op1 = TREE_OPERAND (expr, 1);
9255 inner_code = TREE_CODE (op1);
9256 if (inner_code == INTEGER_CST)
9258 *residue += TREE_INT_CST_LOW (op1);
9261 else if (inner_code == MULT_EXPR)
9263 op1 = TREE_OPERAND (op1, 1);
9264 if (TREE_CODE (op1) == INTEGER_CST)
9266 unsigned HOST_WIDE_INT align;
9268 /* Compute the greatest power-of-2 divisor of op1. */
9269 align = TREE_INT_CST_LOW (op1);
9272 /* If align is non-zero and less than *modulus, replace
9273 *modulus with align. If align is 0, then either op1 is 0
9274 or the greatest power-of-2 divisor of op1 doesn't fit in an
9275 unsigned HOST_WIDE_INT. In either case, no additional
9276 constraint is imposed. */
9278 modulus = MIN (modulus, align);
9285 /* If we get here, we were unable to determine anything useful about the expression. */
9291 /* Fold a binary expression of code CODE and type TYPE with operands
9292 OP0 and OP1. LOC is the location of the resulting expression.
9293 Return the folded expression if folding is successful. Otherwise,
9294 return NULL_TREE. */
9297 fold_binary_loc (location_t loc,
9298 enum tree_code code, tree type, tree op0, tree op1)
9300 enum tree_code_class kind = TREE_CODE_CLASS (code);
9301 tree arg0, arg1, tem;
9302 tree t1 = NULL_TREE;
9303 bool strict_overflow_p;
9305 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9306 && TREE_CODE_LENGTH (code) == 2
9308 && op1 != NULL_TREE);
9313 /* Strip any conversions that don't change the mode. This is
9314 safe for every expression, except for a comparison expression
9315 because its signedness is derived from its operands. So, in
9316 the latter case, only strip conversions that don't change the
9317 signedness. MIN_EXPR/MAX_EXPR also need the signedness of their arguments preserved.
9320 Note that this is done as an internal manipulation within the
9321 constant folder, in order to find the simplest representation
9322 of the arguments so that their form can be studied. In any
9323 cases, the appropriate type conversions should be put back in
9324 the tree that will get out of the constant folder. */
9326 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9328 STRIP_SIGN_NOPS (arg0);
9329 STRIP_SIGN_NOPS (arg1);
9337 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9338 constant but we can't do arithmetic on them. */
9339 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9340 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9341 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9342 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9343 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9344 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9346 if (kind == tcc_binary)
9348 /* Make sure type and arg0 have the same saturating flag. */
9349 gcc_assert (TYPE_SATURATING (type)
9350 == TYPE_SATURATING (TREE_TYPE (arg0)));
9351 tem = const_binop (code, arg0, arg1);
9353 else if (kind == tcc_comparison)
9354 tem = fold_relational_const (code, type, arg0, arg1);
9358 if (tem != NULL_TREE)
9360 if (TREE_TYPE (tem) != type)
9361 tem = fold_convert_loc (loc, type, tem);
9366 /* If this is a commutative operation, and ARG0 is a constant, move it
9367 to ARG1 to reduce the number of tests below. */
9368 if (commutative_tree_code (code)
9369 && tree_swap_operands_p (arg0, arg1, true))
9370 return fold_build2_loc (loc, code, type, op1, op0);
9372 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9374 First check for cases where an arithmetic operation is applied to a
9375 compound, conditional, or comparison operation. Push the arithmetic
9376 operation inside the compound or conditional to see if any folding
9377 can then be done. Convert comparison to conditional for this purpose.
9378 This also optimizes non-constant cases that used to be done in
9381 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9382 one of the operands is a comparison and the other is a comparison, a
9383 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9384 code below would make the expression more complex. Change it to a
9385 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9386 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
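/* For instance, (a < b) & (c != 0) becomes a TRUTH_AND_EXPR of the two
   comparisons, and (a < b) == (c < d) becomes the inversion of their
   TRUTH_XOR_EXPR, since every operand involved is known to be 0 or 1. */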
9388 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9389 || code == EQ_EXPR || code == NE_EXPR)
9390 && ((truth_value_p (TREE_CODE (arg0))
9391 && (truth_value_p (TREE_CODE (arg1))
9392 || (TREE_CODE (arg1) == BIT_AND_EXPR
9393 && integer_onep (TREE_OPERAND (arg1, 1)))))
9394 || (truth_value_p (TREE_CODE (arg1))
9395 && (truth_value_p (TREE_CODE (arg0))
9396 || (TREE_CODE (arg0) == BIT_AND_EXPR
9397 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9399 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9400 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9403 fold_convert_loc (loc, boolean_type_node, arg0),
9404 fold_convert_loc (loc, boolean_type_node, arg1));
9406 if (code == EQ_EXPR)
9407 tem = invert_truthvalue_loc (loc, tem);
9409 return fold_convert_loc (loc, type, tem);
9412 if (TREE_CODE_CLASS (code) == tcc_binary
9413 || TREE_CODE_CLASS (code) == tcc_comparison)
9415 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9417 tem = fold_build2_loc (loc, code, type,
9418 fold_convert_loc (loc, TREE_TYPE (op0),
9419 TREE_OPERAND (arg0, 1)), op1);
9420 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9423 if (TREE_CODE (arg1) == COMPOUND_EXPR
9424 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9426 tem = fold_build2_loc (loc, code, type, op0,
9427 fold_convert_loc (loc, TREE_TYPE (op1),
9428 TREE_OPERAND (arg1, 1)));
9429 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9433 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9435 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9437 /*cond_first_p=*/1);
9438 if (tem != NULL_TREE)
9442 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9444 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9446 /*cond_first_p=*/0);
9447 if (tem != NULL_TREE)
9455 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9456 if (TREE_CODE (arg0) == ADDR_EXPR
9457 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9459 tree iref = TREE_OPERAND (arg0, 0);
9460 return fold_build2 (MEM_REF, type,
9461 TREE_OPERAND (iref, 0),
9462 int_const_binop (PLUS_EXPR, arg1,
9463 TREE_OPERAND (iref, 1)));
9466 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9467 if (TREE_CODE (arg0) == ADDR_EXPR
9468 && handled_component_p (TREE_OPERAND (arg0, 0)))
9471 HOST_WIDE_INT coffset;
9472 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9476 return fold_build2 (MEM_REF, type,
9477 build_fold_addr_expr (base),
9478 int_const_binop (PLUS_EXPR, arg1,
9479 size_int (coffset)));
9484 case POINTER_PLUS_EXPR:
9485 /* 0 +p index -> (type)index */
9486 if (integer_zerop (arg0))
9487 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9489 /* PTR +p 0 -> PTR */
9490 if (integer_zerop (arg1))
9491 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9493 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9494 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9495 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9496 return fold_convert_loc (loc, type,
9497 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9498 fold_convert_loc (loc, sizetype,
9500 fold_convert_loc (loc, sizetype,
9503 /* index +p PTR -> PTR +p index */
9504 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9505 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9506 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9507 fold_convert_loc (loc, type, arg1),
9508 fold_convert_loc (loc, sizetype, arg0));
9510 /* (PTR +p B) +p A -> PTR +p (B + A) */
9511 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9514 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9515 tree arg00 = TREE_OPERAND (arg0, 0);
9516 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9517 arg01, fold_convert_loc (loc, sizetype, arg1));
9518 return fold_convert_loc (loc, type,
9519 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9524 /* PTR_CST +p CST -> CST1 */
9525 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9526 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9527 fold_convert_loc (loc, type, arg1));
9529 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9530 of the array. Loop optimizers sometimes produce this kind of expression. */
9532 if (TREE_CODE (arg0) == ADDR_EXPR)
9534 tem = try_move_mult_to_index (loc, arg0,
9535 fold_convert_loc (loc, sizetype, arg1));
9537 return fold_convert_loc (loc, type, tem);
9543 /* A + (-B) -> A - B */
9544 if (TREE_CODE (arg1) == NEGATE_EXPR)
9545 return fold_build2_loc (loc, MINUS_EXPR, type,
9546 fold_convert_loc (loc, type, arg0),
9547 fold_convert_loc (loc, type,
9548 TREE_OPERAND (arg1, 0)));
9549 /* (-A) + B -> B - A */
9550 if (TREE_CODE (arg0) == NEGATE_EXPR
9551 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9552 return fold_build2_loc (loc, MINUS_EXPR, type,
9553 fold_convert_loc (loc, type, arg1),
9554 fold_convert_loc (loc, type,
9555 TREE_OPERAND (arg0, 0)));
9557 if (INTEGRAL_TYPE_P (type))
9559 /* Convert ~A + 1 to -A. */
9560 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9561 && integer_onep (arg1))
9562 return fold_build1_loc (loc, NEGATE_EXPR, type,
9563 fold_convert_loc (loc, type,
9564 TREE_OPERAND (arg0, 0)));
9567 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9568 && !TYPE_OVERFLOW_TRAPS (type))
9570 tree tem = TREE_OPERAND (arg0, 0);
9573 if (operand_equal_p (tem, arg1, 0))
9575 t1 = build_int_cst_type (type, -1);
9576 return omit_one_operand_loc (loc, type, t1, arg1);
9581 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9582 && !TYPE_OVERFLOW_TRAPS (type))
9584 tree tem = TREE_OPERAND (arg1, 0);
9587 if (operand_equal_p (arg0, tem, 0))
9589 t1 = build_int_cst_type (type, -1);
9590 return omit_one_operand_loc (loc, type, t1, arg0);
9594 /* X + (X / CST) * -CST is X % CST. */
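/* E.g. x + (x / 16) * -16 folds to x % 16, because truncating division
   satisfies x == (x / 16) * 16 + x % 16. */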
9595 if (TREE_CODE (arg1) == MULT_EXPR
9596 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9597 && operand_equal_p (arg0,
9598 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9600 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9601 tree cst1 = TREE_OPERAND (arg1, 1);
9602 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9604 if (sum && integer_zerop (sum))
9605 return fold_convert_loc (loc, type,
9606 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9607 TREE_TYPE (arg0), arg0,
9612 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9613 same or one. Make sure type is not saturating.
9614 fold_plusminus_mult_expr will re-associate. */
9615 if ((TREE_CODE (arg0) == MULT_EXPR
9616 || TREE_CODE (arg1) == MULT_EXPR)
9617 && !TYPE_SATURATING (type)
9618 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9620 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9625 if (! FLOAT_TYPE_P (type))
9627 if (integer_zerop (arg1))
9628 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9630 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9631 with a constant, and the two constants have no bits in common,
9632 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications. */
9634 if (TREE_CODE (arg0) == BIT_AND_EXPR
9635 && TREE_CODE (arg1) == BIT_AND_EXPR
9636 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9637 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9638 && integer_zerop (const_binop (BIT_AND_EXPR,
9639 TREE_OPERAND (arg0, 1),
9640 TREE_OPERAND (arg1, 1))))
9642 code = BIT_IOR_EXPR;
9646 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9647 (plus (plus (mult) (mult)) (foo)) so that we can
9648 take advantage of the factoring cases below. */
9649 if (((TREE_CODE (arg0) == PLUS_EXPR
9650 || TREE_CODE (arg0) == MINUS_EXPR)
9651 && TREE_CODE (arg1) == MULT_EXPR)
9652 || ((TREE_CODE (arg1) == PLUS_EXPR
9653 || TREE_CODE (arg1) == MINUS_EXPR)
9654 && TREE_CODE (arg0) == MULT_EXPR))
9656 tree parg0, parg1, parg, marg;
9657 enum tree_code pcode;
9659 if (TREE_CODE (arg1) == MULT_EXPR)
9660 parg = arg0, marg = arg1;
9662 parg = arg1, marg = arg0;
9663 pcode = TREE_CODE (parg);
9664 parg0 = TREE_OPERAND (parg, 0);
9665 parg1 = TREE_OPERAND (parg, 1);
9669 if (TREE_CODE (parg0) == MULT_EXPR
9670 && TREE_CODE (parg1) != MULT_EXPR)
9671 return fold_build2_loc (loc, pcode, type,
9672 fold_build2_loc (loc, PLUS_EXPR, type,
9673 fold_convert_loc (loc, type,
9675 fold_convert_loc (loc, type,
9677 fold_convert_loc (loc, type, parg1));
9678 if (TREE_CODE (parg0) != MULT_EXPR
9679 && TREE_CODE (parg1) == MULT_EXPR)
9681 fold_build2_loc (loc, PLUS_EXPR, type,
9682 fold_convert_loc (loc, type, parg0),
9683 fold_build2_loc (loc, pcode, type,
9684 fold_convert_loc (loc, type, marg),
9685 fold_convert_loc (loc, type,
9691 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9692 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9693 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9695 /* Likewise if the operands are reversed. */
9696 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9697 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9699 /* Convert X + -C into X - C. */
9700 if (TREE_CODE (arg1) == REAL_CST
9701 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9703 tem = fold_negate_const (arg1, type);
9704 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9705 return fold_build2_loc (loc, MINUS_EXPR, type,
9706 fold_convert_loc (loc, type, arg0),
9707 fold_convert_loc (loc, type, tem));
9710 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9711 to __complex__ ( x, y ). This is not the same for SNaNs or
9712 if signed zeros are involved. */
9713 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9714 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9715 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9717 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9718 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9719 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9720 bool arg0rz = false, arg0iz = false;
9721 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9722 || (arg0i && (arg0iz = real_zerop (arg0i))))
9724 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9725 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9726 if (arg0rz && arg1i && real_zerop (arg1i))
9728 tree rp = arg1r ? arg1r
9729 : build1 (REALPART_EXPR, rtype, arg1);
9730 tree ip = arg0i ? arg0i
9731 : build1 (IMAGPART_EXPR, rtype, arg0);
9732 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9734 else if (arg0iz && arg1r && real_zerop (arg1r))
9736 tree rp = arg0r ? arg0r
9737 : build1 (REALPART_EXPR, rtype, arg0);
9738 tree ip = arg1i ? arg1i
9739 : build1 (IMAGPART_EXPR, rtype, arg1);
9740 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9745 if (flag_unsafe_math_optimizations
9746 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9747 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9748 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9751 /* Convert x+x into x*2.0. */
9752 if (operand_equal_p (arg0, arg1, 0)
9753 && SCALAR_FLOAT_TYPE_P (type))
9754 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9755 build_real (type, dconst2));
9757 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9758 We associate floats only if the user has specified
9759 -fassociative-math. */
9760 if (flag_associative_math
9761 && TREE_CODE (arg1) == PLUS_EXPR
9762 && TREE_CODE (arg0) != MULT_EXPR)
9764 tree tree10 = TREE_OPERAND (arg1, 0);
9765 tree tree11 = TREE_OPERAND (arg1, 1);
9766 if (TREE_CODE (tree11) == MULT_EXPR
9767 && TREE_CODE (tree10) == MULT_EXPR)
9770 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9771 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9774 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9775 We associate floats only if the user has specified
9776 -fassociative-math. */
9777 if (flag_associative_math
9778 && TREE_CODE (arg0) == PLUS_EXPR
9779 && TREE_CODE (arg1) != MULT_EXPR)
9781 tree tree00 = TREE_OPERAND (arg0, 0);
9782 tree tree01 = TREE_OPERAND (arg0, 1);
9783 if (TREE_CODE (tree01) == MULT_EXPR
9784 && TREE_CODE (tree00) == MULT_EXPR)
9787 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9788 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9794 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9795 is a rotate of A by C1 bits. */
9796 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9797 is a rotate of A by B bits. */
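/* E.g. for a 32-bit unsigned x, (x << 5) + (x >> 27) and
   (x << n) + (x >> (32 - n)) both become LROTATE_EXPRs; the addition acts
   like a bitwise OR here because the two shifted halves have no
   overlapping bits. */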
9799 enum tree_code code0, code1;
9801 code0 = TREE_CODE (arg0);
9802 code1 = TREE_CODE (arg1);
9803 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9804 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9805 && operand_equal_p (TREE_OPERAND (arg0, 0),
9806 TREE_OPERAND (arg1, 0), 0)
9807 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9808 TYPE_UNSIGNED (rtype))
9809 /* Only create rotates in complete modes. Other cases are not
9810 expanded properly. */
9811 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9813 tree tree01, tree11;
9814 enum tree_code code01, code11;
9816 tree01 = TREE_OPERAND (arg0, 1);
9817 tree11 = TREE_OPERAND (arg1, 1);
9818 STRIP_NOPS (tree01);
9819 STRIP_NOPS (tree11);
9820 code01 = TREE_CODE (tree01);
9821 code11 = TREE_CODE (tree11);
9822 if (code01 == INTEGER_CST
9823 && code11 == INTEGER_CST
9824 && TREE_INT_CST_HIGH (tree01) == 0
9825 && TREE_INT_CST_HIGH (tree11) == 0
9826 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9827 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9829 tem = build2_loc (loc, LROTATE_EXPR,
9830 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9831 TREE_OPERAND (arg0, 0),
9832 code0 == LSHIFT_EXPR ? tree01 : tree11);
9833 return fold_convert_loc (loc, type, tem);
9835 else if (code11 == MINUS_EXPR)
9837 tree tree110, tree111;
9838 tree110 = TREE_OPERAND (tree11, 0);
9839 tree111 = TREE_OPERAND (tree11, 1);
9840 STRIP_NOPS (tree110);
9841 STRIP_NOPS (tree111);
9842 if (TREE_CODE (tree110) == INTEGER_CST
9843 && 0 == compare_tree_int (tree110,
9845 (TREE_TYPE (TREE_OPERAND
9847 && operand_equal_p (tree01, tree111, 0))
9849 fold_convert_loc (loc, type,
9850 build2 ((code0 == LSHIFT_EXPR
9853 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9854 TREE_OPERAND (arg0, 0), tree01));
9856 else if (code01 == MINUS_EXPR)
9858 tree tree010, tree011;
9859 tree010 = TREE_OPERAND (tree01, 0);
9860 tree011 = TREE_OPERAND (tree01, 1);
9861 STRIP_NOPS (tree010);
9862 STRIP_NOPS (tree011);
9863 if (TREE_CODE (tree010) == INTEGER_CST
9864 && 0 == compare_tree_int (tree010,
9866 (TREE_TYPE (TREE_OPERAND
9868 && operand_equal_p (tree11, tree011, 0))
9869 return fold_convert_loc
9871 build2 ((code0 != LSHIFT_EXPR
9874 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9875 TREE_OPERAND (arg0, 0), tree11));
9881 /* In most languages, we can't associate operations on floats through
9882 parentheses. Rather than remember where the parentheses were, we
9883 don't associate floats at all, unless the user has specified -fassociative-math.
9885 And, we need to make sure type is not saturating. */
9887 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9888 && !TYPE_SATURATING (type))
9890 tree var0, con0, lit0, minus_lit0;
9891 tree var1, con1, lit1, minus_lit1;
9894 /* Split both trees into variables, constants, and literals. Then
9895 associate each group together, the constants with literals,
9896 then the result with variables. This increases the chances of
9897 literals being recombined later and of generating relocatable
9898 expressions for the sum of a constant and literal. */
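/* Sketch: for unsigned arithmetic, (x + 5) + (y + 7) is split into its
   variable and literal parts and rebuilt as (x + y) + 12; for types with
   undefined overflow the checks below restrict which parts may be
   recombined. */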
9899 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9900 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9901 code == MINUS_EXPR);
9903 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9904 if (code == MINUS_EXPR)
9907 /* With undefined overflow we can only associate constants with one
9908 variable, and constants whose association doesn't overflow. */
9909 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9910 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9917 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9918 tmp0 = TREE_OPERAND (tmp0, 0);
9919 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9920 tmp1 = TREE_OPERAND (tmp1, 0);
9921 /* The only case we can still associate with two variables
9922 is if they are the same, modulo negation. */
9923 if (!operand_equal_p (tmp0, tmp1, 0))
9927 if (ok && lit0 && lit1)
9929 tree tmp0 = fold_convert (type, lit0);
9930 tree tmp1 = fold_convert (type, lit1);
9932 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
9933 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
9938 /* Only do something if we found more than two objects. Otherwise,
9939 nothing has changed and we risk infinite recursion. */
9941 && (2 < ((var0 != 0) + (var1 != 0)
9942 + (con0 != 0) + (con1 != 0)
9943 + (lit0 != 0) + (lit1 != 0)
9944 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9946 var0 = associate_trees (loc, var0, var1, code, type);
9947 con0 = associate_trees (loc, con0, con1, code, type);
9948 lit0 = associate_trees (loc, lit0, lit1, code, type);
9949 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
9951 /* Preserve the MINUS_EXPR if the negative part of the literal is
9952 greater than the positive part. Otherwise, the multiplicative
9953 folding code (i.e. extract_muldiv) may be fooled in case
9954 unsigned constants are subtracted, like in the following
9955 example: ((X*2 + 4) - 8U)/2. */
9956 if (minus_lit0 && lit0)
9958 if (TREE_CODE (lit0) == INTEGER_CST
9959 && TREE_CODE (minus_lit0) == INTEGER_CST
9960 && tree_int_cst_lt (lit0, minus_lit0))
9962 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9968 lit0 = associate_trees (loc, lit0, minus_lit0,
9977 fold_convert_loc (loc, type,
9978 associate_trees (loc, var0, minus_lit0,
9982 con0 = associate_trees (loc, con0, minus_lit0,
9985 fold_convert_loc (loc, type,
9986 associate_trees (loc, var0, con0,
9991 con0 = associate_trees (loc, con0, lit0, code, type);
9993 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10001 /* Pointer simplifications for subtraction, simple reassociations. */
10002 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10004 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10005 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10006 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10008 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10009 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10010 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10011 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10012 return fold_build2_loc (loc, PLUS_EXPR, type,
10013 fold_build2_loc (loc, MINUS_EXPR, type,
10015 fold_build2_loc (loc, MINUS_EXPR, type,
10018 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10019 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10021 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10022 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10023 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10024 fold_convert_loc (loc, type, arg1));
10026 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10029 /* A - (-B) -> A + B */
10030 if (TREE_CODE (arg1) == NEGATE_EXPR)
10031 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10032 fold_convert_loc (loc, type,
10033 TREE_OPERAND (arg1, 0)));
10034 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10035 if (TREE_CODE (arg0) == NEGATE_EXPR
10036 && (FLOAT_TYPE_P (type)
10037 || INTEGRAL_TYPE_P (type))
10038 && negate_expr_p (arg1)
10039 && reorder_operands_p (arg0, arg1))
10040 return fold_build2_loc (loc, MINUS_EXPR, type,
10041 fold_convert_loc (loc, type,
10042 negate_expr (arg1)),
10043 fold_convert_loc (loc, type,
10044 TREE_OPERAND (arg0, 0)));
10045 /* Convert -A - 1 to ~A. */
10046 if (INTEGRAL_TYPE_P (type)
10047 && TREE_CODE (arg0) == NEGATE_EXPR
10048 && integer_onep (arg1)
10049 && !TYPE_OVERFLOW_TRAPS (type))
10050 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10051 fold_convert_loc (loc, type,
10052 TREE_OPERAND (arg0, 0)));
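/* A hand-written illustration of the fold above (the function and its
   argument are hypothetical, not taken from the GCC testsuite): in two's
   complement arithmetic ~A == -A - 1, so

     int f (int x) { return -x - 1; }

   is folded to the equivalent of

     int f (int x) { return ~x; }

   provided the type does not trap on overflow.  */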
10054 /* Convert -1 - A to ~A. */
10055 if (INTEGRAL_TYPE_P (type)
10056 && integer_all_onesp (arg0))
10057 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10060 /* X - (X / CST) * CST is X % CST. */
10061 if (INTEGRAL_TYPE_P (type)
10062 && TREE_CODE (arg1) == MULT_EXPR
10063 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10064 && operand_equal_p (arg0,
10065 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10066 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10067 TREE_OPERAND (arg1, 1), 0))
10069 fold_convert_loc (loc, type,
10070 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10071 arg0, TREE_OPERAND (arg1, 1)));
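/* Illustrative sketch of the fold above (hypothetical operands):

     x - (x / 7) * 7   ==>   x % 7

   which holds for truncating integer division, since
   x == (x / 7) * 7 + x % 7 by definition of TRUNC_DIV and TRUNC_MOD.  */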
10073 if (! FLOAT_TYPE_P (type))
10075 if (integer_zerop (arg0))
10076 return negate_expr (fold_convert_loc (loc, type, arg1));
10077 if (integer_zerop (arg1))
10078 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10080 /* Fold A - (A & B) into ~B & A. */
10081 if (!TREE_SIDE_EFFECTS (arg0)
10082 && TREE_CODE (arg1) == BIT_AND_EXPR)
10084 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10086 tree arg10 = fold_convert_loc (loc, type,
10087 TREE_OPERAND (arg1, 0));
10088 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10089 fold_build1_loc (loc, BIT_NOT_EXPR,
10091 fold_convert_loc (loc, type, arg0));
10093 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10095 tree arg11 = fold_convert_loc (loc,
10096 type, TREE_OPERAND (arg1, 1));
10097 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10098 fold_build1_loc (loc, BIT_NOT_EXPR,
10100 fold_convert_loc (loc, type, arg0));
10104 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10105 any power of 2 minus 1. */
10106 if (TREE_CODE (arg0) == BIT_AND_EXPR
10107 && TREE_CODE (arg1) == BIT_AND_EXPR
10108 && operand_equal_p (TREE_OPERAND (arg0, 0),
10109 TREE_OPERAND (arg1, 0), 0))
10111 tree mask0 = TREE_OPERAND (arg0, 1);
10112 tree mask1 = TREE_OPERAND (arg1, 1);
10113 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10115 if (operand_equal_p (tem, mask1, 0))
10117 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10118 TREE_OPERAND (arg0, 0), mask1);
10119 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
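/* A worked example of the fold above, with B == 0xff (a power of 2
   minus 1) and a hypothetical A == 0x1234:

     (0x1234 & ~0xff) - (0x1234 & 0xff)  ==  0x1200 - 0x34  ==  0x11cc
     (0x1234 ^  0xff) -  0xff            ==  0x12cb - 0xff  ==  0x11cc  */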
10124 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10125 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10126 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10128 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10129 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10130 (-ARG1 + ARG0) reduces to -ARG1. */
10131 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10132 return negate_expr (fold_convert_loc (loc, type, arg1));
10134 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10135 __complex__ ( x, -y ). This is not the same for SNaNs or if
10136 signed zeros are involved. */
10137 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10138 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10139 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10141 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10142 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10143 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10144 bool arg0rz = false, arg0iz = false;
10145 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10146 || (arg0i && (arg0iz = real_zerop (arg0i))))
10148 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10149 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10150 if (arg0rz && arg1i && real_zerop (arg1i))
10152 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10154 : build1 (REALPART_EXPR, rtype, arg1));
10155 tree ip = arg0i ? arg0i
10156 : build1 (IMAGPART_EXPR, rtype, arg0);
10157 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10159 else if (arg0iz && arg1r && real_zerop (arg1r))
10161 tree rp = arg0r ? arg0r
10162 : build1 (REALPART_EXPR, rtype, arg0);
10163 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10165 : build1 (IMAGPART_EXPR, rtype, arg1));
10166 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10171 /* Fold &x - &x. This can happen from &x.foo - &x.
10172 This is unsafe for certain floats even in non-IEEE formats.
10173 In IEEE, it is unsafe because it does wrong for NaNs.
10174 Also note that operand_equal_p is always false if an operand is volatile.  */
10177 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10178 && operand_equal_p (arg0, arg1, 0))
10179 return build_zero_cst (type);
10181 /* A - B -> A + (-B) if B is easily negatable. */
10182 if (negate_expr_p (arg1)
10183 && ((FLOAT_TYPE_P (type)
10184 /* Avoid this transformation if B is a positive REAL_CST. */
10185 && (TREE_CODE (arg1) != REAL_CST
10186 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10187 || INTEGRAL_TYPE_P (type)))
10188 return fold_build2_loc (loc, PLUS_EXPR, type,
10189 fold_convert_loc (loc, type, arg0),
10190 fold_convert_loc (loc, type,
10191 negate_expr (arg1)));
10193 /* Try folding difference of addresses. */
10195 HOST_WIDE_INT diff;
10197 if ((TREE_CODE (arg0) == ADDR_EXPR
10198 || TREE_CODE (arg1) == ADDR_EXPR)
10199 && ptr_difference_const (arg0, arg1, &diff))
10200 return build_int_cst_type (type, diff);
10203 /* Fold &a[i] - &a[j] to i-j. */
10204 if (TREE_CODE (arg0) == ADDR_EXPR
10205 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10206 && TREE_CODE (arg1) == ADDR_EXPR
10207 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10209 tree aref0 = TREE_OPERAND (arg0, 0);
10210 tree aref1 = TREE_OPERAND (arg1, 0);
10211 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10212 TREE_OPERAND (aref1, 0), 0))
10214 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10215 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10216 tree esz = array_ref_element_size (aref0);
10217 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10218 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10219 fold_convert_loc (loc, type, esz));
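/* Illustrative example of the fold above (assuming a 4-byte int): for
   "int a[N];", the raw address difference &a[i] - &a[j] is folded into
   (i - j) * 4, i.e. the index difference scaled by the element size
   returned by array_ref_element_size.  */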
10224 if (FLOAT_TYPE_P (type)
10225 && flag_unsafe_math_optimizations
10226 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10227 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10228 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10231 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10232 same or one. Make sure type is not saturating.
10233 fold_plusminus_mult_expr will re-associate. */
10234 if ((TREE_CODE (arg0) == MULT_EXPR
10235 || TREE_CODE (arg1) == MULT_EXPR)
10236 && !TYPE_SATURATING (type)
10237 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10239 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10247 /* (-A) * (-B) -> A * B */
10248 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10249 return fold_build2_loc (loc, MULT_EXPR, type,
10250 fold_convert_loc (loc, type,
10251 TREE_OPERAND (arg0, 0)),
10252 fold_convert_loc (loc, type,
10253 negate_expr (arg1)));
10254 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10255 return fold_build2_loc (loc, MULT_EXPR, type,
10256 fold_convert_loc (loc, type,
10257 negate_expr (arg0)),
10258 fold_convert_loc (loc, type,
10259 TREE_OPERAND (arg1, 0)));
10261 if (! FLOAT_TYPE_P (type))
10263 if (integer_zerop (arg1))
10264 return omit_one_operand_loc (loc, type, arg1, arg0);
10265 if (integer_onep (arg1))
10266 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10267 /* Transform x * -1 into -x. Make sure to do the negation
10268 on the original operand with conversions not stripped
10269 because we can only strip non-sign-changing conversions. */
10270 if (integer_all_onesp (arg1))
10271 return fold_convert_loc (loc, type, negate_expr (op0));
10272 /* Transform x * -C into -x * C if x is easily negatable. */
10273 if (TREE_CODE (arg1) == INTEGER_CST
10274 && tree_int_cst_sgn (arg1) == -1
10275 && negate_expr_p (arg0)
10276 && (tem = negate_expr (arg1)) != arg1
10277 && !TREE_OVERFLOW (tem))
10278 return fold_build2_loc (loc, MULT_EXPR, type,
10279 fold_convert_loc (loc, type,
10280 negate_expr (arg0)),
10283 /* (a * (1 << b)) is (a << b) */
10284 if (TREE_CODE (arg1) == LSHIFT_EXPR
10285 && integer_onep (TREE_OPERAND (arg1, 0)))
10286 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10287 TREE_OPERAND (arg1, 1));
10288 if (TREE_CODE (arg0) == LSHIFT_EXPR
10289 && integer_onep (TREE_OPERAND (arg0, 0)))
10290 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10291 TREE_OPERAND (arg0, 1));
10293 /* (A + A) * C -> A * 2 * C */
10294 if (TREE_CODE (arg0) == PLUS_EXPR
10295 && TREE_CODE (arg1) == INTEGER_CST
10296 && operand_equal_p (TREE_OPERAND (arg0, 0),
10297 TREE_OPERAND (arg0, 1), 0))
10298 return fold_build2_loc (loc, MULT_EXPR, type,
10299 omit_one_operand_loc (loc, type,
10300 TREE_OPERAND (arg0, 0),
10301 TREE_OPERAND (arg0, 1)),
10302 fold_build2_loc (loc, MULT_EXPR, type,
10303 build_int_cst (type, 2), arg1));
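/* E.g. (a + a) * 5 folds to a * 10 (hypothetical operands); the inner
   2 * 5 is constant-folded by the recursive fold_build2_loc call.  */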
10305 strict_overflow_p = false;
10306 if (TREE_CODE (arg1) == INTEGER_CST
10307 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10308 &strict_overflow_p)))
10310 if (strict_overflow_p)
10311 fold_overflow_warning (("assuming signed overflow does not "
10312 "occur when simplifying "
10314 WARN_STRICT_OVERFLOW_MISC);
10315 return fold_convert_loc (loc, type, tem);
10318 /* Optimize z * conj(z) for integer complex numbers. */
10319 if (TREE_CODE (arg0) == CONJ_EXPR
10320 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10321 return fold_mult_zconjz (loc, type, arg1);
10322 if (TREE_CODE (arg1) == CONJ_EXPR
10323 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10324 return fold_mult_zconjz (loc, type, arg0);
10328 /* Maybe fold x * 0 to 0. The expressions aren't the same
10329 when x is NaN, since x * 0 is also NaN. Nor are they the
10330 same in modes with signed zeros, since multiplying a
10331 negative value by 0 gives -0, not +0. */
10332 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10333 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10334 && real_zerop (arg1))
10335 return omit_one_operand_loc (loc, type, arg1, arg0);
10336 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10337 Likewise for complex arithmetic with signed zeros. */
10338 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10339 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10340 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10341 && real_onep (arg1))
10342 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10344 /* Transform x * -1.0 into -x. */
10345 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10346 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10347 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10348 && real_minus_onep (arg1))
10349 return fold_convert_loc (loc, type, negate_expr (arg0));
10351 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10352 the result for floating point types due to rounding so it is applied
10353 only if -fassociative-math was specified.  */
10354 if (flag_associative_math
10355 && TREE_CODE (arg0) == RDIV_EXPR
10356 && TREE_CODE (arg1) == REAL_CST
10357 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10359 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10362 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10363 TREE_OPERAND (arg0, 1));
10366 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10367 if (operand_equal_p (arg0, arg1, 0))
10369 tree tem = fold_strip_sign_ops (arg0);
10370 if (tem != NULL_TREE)
10372 tem = fold_convert_loc (loc, type, tem);
10373 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10377 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10378 This is not the same for NaNs or if signed zeros are involved.  */
10380 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10381 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10382 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10383 && TREE_CODE (arg1) == COMPLEX_CST
10384 && real_zerop (TREE_REALPART (arg1)))
10386 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10387 if (real_onep (TREE_IMAGPART (arg1)))
10389 fold_build2_loc (loc, COMPLEX_EXPR, type,
10390 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10392 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10393 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10395 fold_build2_loc (loc, COMPLEX_EXPR, type,
10396 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10397 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10401 /* Optimize z * conj(z) for floating point complex numbers.
10402 Guarded by flag_unsafe_math_optimizations as non-finite
10403 imaginary components don't produce scalar results. */
10404 if (flag_unsafe_math_optimizations
10405 && TREE_CODE (arg0) == CONJ_EXPR
10406 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10407 return fold_mult_zconjz (loc, type, arg1);
10408 if (flag_unsafe_math_optimizations
10409 && TREE_CODE (arg1) == CONJ_EXPR
10410 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10411 return fold_mult_zconjz (loc, type, arg0);
10413 if (flag_unsafe_math_optimizations)
10415 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10416 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10418 /* Optimizations of root(...)*root(...). */
10419 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10422 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10423 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10425 /* Optimize sqrt(x)*sqrt(x) as x. */
10426 if (BUILTIN_SQRT_P (fcode0)
10427 && operand_equal_p (arg00, arg10, 0)
10428 && ! HONOR_SNANS (TYPE_MODE (type)))
10431 /* Optimize root(x)*root(y) as root(x*y). */
10432 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10433 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10434 return build_call_expr_loc (loc, rootfn, 1, arg);
10437 /* Optimize expN(x)*expN(y) as expN(x+y). */
10438 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10440 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10441 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10442 CALL_EXPR_ARG (arg0, 0),
10443 CALL_EXPR_ARG (arg1, 0));
10444 return build_call_expr_loc (loc, expfn, 1, arg);
10447 /* Optimizations of pow(...)*pow(...). */
10448 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10449 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10450 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10452 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10453 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10454 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10455 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10457 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10458 if (operand_equal_p (arg01, arg11, 0))
10460 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10461 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10463 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10466 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10467 if (operand_equal_p (arg00, arg10, 0))
10469 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10470 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10472 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10476 /* Optimize tan(x)*cos(x) as sin(x). */
10477 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10478 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10479 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10480 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10481 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10482 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10483 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10484 CALL_EXPR_ARG (arg1, 0), 0))
10486 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10488 if (sinfn != NULL_TREE)
10489 return build_call_expr_loc (loc, sinfn, 1,
10490 CALL_EXPR_ARG (arg0, 0));
10493 /* Optimize x*pow(x,c) as pow(x,c+1). */
10494 if (fcode1 == BUILT_IN_POW
10495 || fcode1 == BUILT_IN_POWF
10496 || fcode1 == BUILT_IN_POWL)
10498 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10499 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10500 if (TREE_CODE (arg11) == REAL_CST
10501 && !TREE_OVERFLOW (arg11)
10502 && operand_equal_p (arg0, arg10, 0))
10504 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10508 c = TREE_REAL_CST (arg11);
10509 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10510 arg = build_real (type, c);
10511 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10515 /* Optimize pow(x,c)*x as pow(x,c+1). */
10516 if (fcode0 == BUILT_IN_POW
10517 || fcode0 == BUILT_IN_POWF
10518 || fcode0 == BUILT_IN_POWL)
10520 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10521 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10522 if (TREE_CODE (arg01) == REAL_CST
10523 && !TREE_OVERFLOW (arg01)
10524 && operand_equal_p (arg1, arg00, 0))
10526 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10530 c = TREE_REAL_CST (arg01);
10531 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10532 arg = build_real (type, c);
10533 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10537 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10538 if (optimize_function_for_speed_p (cfun)
10539 && operand_equal_p (arg0, arg1, 0))
10541 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10545 tree arg = build_real (type, dconst2);
10546 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10555 if (integer_all_onesp (arg1))
10556 return omit_one_operand_loc (loc, type, arg1, arg0);
10557 if (integer_zerop (arg1))
10558 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10559 if (operand_equal_p (arg0, arg1, 0))
10560 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10562 /* ~X | X is -1. */
10563 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10564 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10566 t1 = build_zero_cst (type);
10567 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10568 return omit_one_operand_loc (loc, type, t1, arg1);
10571 /* X | ~X is -1. */
10572 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10573 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10575 t1 = build_zero_cst (type);
10576 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10577 return omit_one_operand_loc (loc, type, t1, arg0);
10580 /* Canonicalize (X & C1) | C2. */
10581 if (TREE_CODE (arg0) == BIT_AND_EXPR
10582 && TREE_CODE (arg1) == INTEGER_CST
10583 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10585 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10586 int width = TYPE_PRECISION (type), w;
10587 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10588 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10589 hi2 = TREE_INT_CST_HIGH (arg1);
10590 lo2 = TREE_INT_CST_LOW (arg1);
10592 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10593 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10594 return omit_one_operand_loc (loc, type, arg1,
10595 TREE_OPERAND (arg0, 0));
10597 if (width > HOST_BITS_PER_WIDE_INT)
10599 mhi = (unsigned HOST_WIDE_INT) -1
10600 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10606 mlo = (unsigned HOST_WIDE_INT) -1
10607 >> (HOST_BITS_PER_WIDE_INT - width);
10610 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10611 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10612 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10613 TREE_OPERAND (arg0, 0), arg1);
10615 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10616 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10617 mode which allows further optimizations. */
10624 for (w = BITS_PER_UNIT;
10625 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10628 unsigned HOST_WIDE_INT mask
10629 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10630 if (((lo1 | lo2) & mask) == mask
10631 && (lo1 & ~mask) == 0 && hi1 == 0)
10638 if (hi3 != hi1 || lo3 != lo1)
10639 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10640 fold_build2_loc (loc, BIT_AND_EXPR, type,
10641 TREE_OPERAND (arg0, 0),
10642 build_int_cst_wide (type,
10647 /* (X & Y) | Y is (X, Y). */
10648 if (TREE_CODE (arg0) == BIT_AND_EXPR
10649 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10650 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10651 /* (X & Y) | X is (Y, X). */
10652 if (TREE_CODE (arg0) == BIT_AND_EXPR
10653 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10654 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10655 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10656 /* X | (X & Y) is (Y, X). */
10657 if (TREE_CODE (arg1) == BIT_AND_EXPR
10658 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10659 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10660 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10661 /* X | (Y & X) is (Y, X). */
10662 if (TREE_CODE (arg1) == BIT_AND_EXPR
10663 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10664 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10665 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10667 /* (X & ~Y) | (~X & Y) is X ^ Y */
10668 if (TREE_CODE (arg0) == BIT_AND_EXPR
10669 && TREE_CODE (arg1) == BIT_AND_EXPR)
10671 tree a0, a1, l0, l1, n0, n1;
10673 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10674 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10676 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10677 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10679 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10680 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10682 if ((operand_equal_p (n0, a0, 0)
10683 && operand_equal_p (n1, a1, 0))
10684 || (operand_equal_p (n0, a1, 0)
10685 && operand_equal_p (n1, a0, 0)))
10686 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
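/* Illustrative example of the check above (hypothetical operands):

     (x & ~y) | (~x & y)   ==>   x ^ y

   The two BIT_AND operands are bitwise complements of each other,
   which is exactly the definition of exclusive or.  */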
10689 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10690 if (t1 != NULL_TREE)
10693 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10695 This results in more efficient code for machines without a NAND
10696 instruction. Combine will canonicalize to the first form
10697 which will allow use of NAND instructions provided by the
10698 backend if they exist. */
10699 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10700 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10703 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10704 build2 (BIT_AND_EXPR, type,
10705 fold_convert_loc (loc, type,
10706 TREE_OPERAND (arg0, 0)),
10707 fold_convert_loc (loc, type,
10708 TREE_OPERAND (arg1, 0))));
10711 /* See if this can be simplified into a rotate first. If that
10712 is unsuccessful continue in the association code. */
10716 if (integer_zerop (arg1))
10717 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10718 if (integer_all_onesp (arg1))
10719 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10720 if (operand_equal_p (arg0, arg1, 0))
10721 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10723 /* ~X ^ X is -1. */
10724 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10725 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10727 t1 = build_zero_cst (type);
10728 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10729 return omit_one_operand_loc (loc, type, t1, arg1);
10732 /* X ^ ~X is -1. */
10733 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10734 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10736 t1 = build_zero_cst (type);
10737 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10738 return omit_one_operand_loc (loc, type, t1, arg0);
10741 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10742 with a constant, and the two constants have no bits in common,
10743 we should treat this as a BIT_IOR_EXPR since this may produce more
10744 simplifications. */
10745 if (TREE_CODE (arg0) == BIT_AND_EXPR
10746 && TREE_CODE (arg1) == BIT_AND_EXPR
10747 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10748 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10749 && integer_zerop (const_binop (BIT_AND_EXPR,
10750 TREE_OPERAND (arg0, 1),
10751 TREE_OPERAND (arg1, 1))))
10753 code = BIT_IOR_EXPR;
10757 /* (X | Y) ^ X -> Y & ~X.  */
10758 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10759 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10761 tree t2 = TREE_OPERAND (arg0, 1);
10762 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10764 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10765 fold_convert_loc (loc, type, t2),
10766 fold_convert_loc (loc, type, t1));
10770 /* (Y | X) ^ X -> Y & ~X.  */
10771 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10772 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10774 tree t2 = TREE_OPERAND (arg0, 0);
10775 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10777 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10778 fold_convert_loc (loc, type, t2),
10779 fold_convert_loc (loc, type, t1));
10783 /* X ^ (X | Y) -> Y & ~X.  */
10784 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10785 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10787 tree t2 = TREE_OPERAND (arg1, 1);
10788 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10790 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10791 fold_convert_loc (loc, type, t2),
10792 fold_convert_loc (loc, type, t1));
10796 /* X ^ (Y | X) -> Y & ~X.  */
10797 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10798 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10800 tree t2 = TREE_OPERAND (arg1, 0);
10801 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10803 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10804 fold_convert_loc (loc, type, t2),
10805 fold_convert_loc (loc, type, t1));
10809 /* Convert ~X ^ ~Y to X ^ Y. */
10810 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10811 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10812 return fold_build2_loc (loc, code, type,
10813 fold_convert_loc (loc, type,
10814 TREE_OPERAND (arg0, 0)),
10815 fold_convert_loc (loc, type,
10816 TREE_OPERAND (arg1, 0)));
10818 /* Convert ~X ^ C to X ^ ~C. */
10819 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10820 && TREE_CODE (arg1) == INTEGER_CST)
10821 return fold_build2_loc (loc, code, type,
10822 fold_convert_loc (loc, type,
10823 TREE_OPERAND (arg0, 0)),
10824 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10826 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10827 if (TREE_CODE (arg0) == BIT_AND_EXPR
10828 && integer_onep (TREE_OPERAND (arg0, 1))
10829 && integer_onep (arg1))
10830 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10831 build_int_cst (TREE_TYPE (arg0), 0));
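/* Sketch of the fold above (hypothetical operand x): (x & 1) ^ 1
   evaluates to 1 when x is even and 0 when x is odd, which is exactly
   the value of (x & 1) == 0.  */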
10833 /* Fold (X & Y) ^ Y as ~X & Y. */
10834 if (TREE_CODE (arg0) == BIT_AND_EXPR
10835 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10837 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10838 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10839 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10840 fold_convert_loc (loc, type, arg1));
10842 /* Fold (X & Y) ^ X as ~Y & X. */
10843 if (TREE_CODE (arg0) == BIT_AND_EXPR
10844 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10845 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10847 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10848 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10849 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10850 fold_convert_loc (loc, type, arg1));
10852 /* Fold X ^ (X & Y) as X & ~Y. */
10853 if (TREE_CODE (arg1) == BIT_AND_EXPR
10854 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10856 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10857 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10858 fold_convert_loc (loc, type, arg0),
10859 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10861 /* Fold X ^ (Y & X) as ~Y & X. */
10862 if (TREE_CODE (arg1) == BIT_AND_EXPR
10863 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10864 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10866 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10867 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10868 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10869 fold_convert_loc (loc, type, arg0));
10872 /* See if this can be simplified into a rotate first. If that
10873 is unsuccessful continue in the association code. */
10877 if (integer_all_onesp (arg1))
10878 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10879 if (integer_zerop (arg1))
10880 return omit_one_operand_loc (loc, type, arg1, arg0);
10881 if (operand_equal_p (arg0, arg1, 0))
10882 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10884 /* ~X & X is always zero. */
10885 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10886 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10887 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10889 /* X & ~X is always zero. */
10890 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10891 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10892 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10894 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10895 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10896 && TREE_CODE (arg1) == INTEGER_CST
10897 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10899 tree tmp1 = fold_convert_loc (loc, type, arg1);
10900 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10901 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10902 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10903 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10905 fold_convert_loc (loc, type,
10906 fold_build2_loc (loc, BIT_IOR_EXPR,
10907 type, tmp2, tmp3));
10910 /* (X | Y) & Y is (X, Y). */
10911 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10912 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10913 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10914 /* (X | Y) & X is (Y, X). */
10915 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10916 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10917 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10918 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10919 /* X & (X | Y) is (Y, X). */
10920 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10921 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10922 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10923 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10924 /* X & (Y | X) is (Y, X). */
10925 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10926 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10927 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10928 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10930 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10931 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10932 && integer_onep (TREE_OPERAND (arg0, 1))
10933 && integer_onep (arg1))
10935 tem = TREE_OPERAND (arg0, 0);
10936 return fold_build2_loc (loc, EQ_EXPR, type,
10937 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10938 build_int_cst (TREE_TYPE (tem), 1)),
10939 build_int_cst (TREE_TYPE (tem), 0));
10941 /* Fold ~X & 1 as (X & 1) == 0. */
10942 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10943 && integer_onep (arg1))
10945 tem = TREE_OPERAND (arg0, 0);
10946 return fold_build2_loc (loc, EQ_EXPR, type,
10947 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10948 build_int_cst (TREE_TYPE (tem), 1)),
10949 build_int_cst (TREE_TYPE (tem), 0));
10952 /* Fold (X ^ Y) & Y as ~X & Y. */
10953 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10954 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10956 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10957 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10958 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10959 fold_convert_loc (loc, type, arg1));
10961 /* Fold (X ^ Y) & X as ~Y & X. */
10962 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10963 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10964 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10966 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10967 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10968 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10969 fold_convert_loc (loc, type, arg1));
10971 /* Fold X & (X ^ Y) as X & ~Y. */
10972 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10973 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10975 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10976 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10977 fold_convert_loc (loc, type, arg0),
10978 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10980 /* Fold X & (Y ^ X) as ~Y & X. */
10981 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10982 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10983 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10985 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10986 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10987 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10988 fold_convert_loc (loc, type, arg0));
10991 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10992 ((A & N) + B) & M -> (A + B) & M
10993 Similarly if (N & M) == 0,
10994 ((A | N) + B) & M -> (A + B) & M
10995 and for - instead of + (or unary - instead of +)
10996 and/or ^ instead of |.
10997 If B is constant and (B & M) == 0, fold into A & M. */
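/* A worked instance of the comment above, assuming M == 0xff and
   N == 0xfff (so (N & M) == M):

     ((a & 0xfff) + b) & 0xff   ==>   (a + b) & 0xff

   Masking a with 0xfff does not change its low 8 bits, and only the
   low 8 bits of each addend can influence the low 8 bits of the sum,
   so the inner mask is redundant.  */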
10998 if (host_integerp (arg1, 1))
11000 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11001 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11002 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11003 && (TREE_CODE (arg0) == PLUS_EXPR
11004 || TREE_CODE (arg0) == MINUS_EXPR
11005 || TREE_CODE (arg0) == NEGATE_EXPR)
11006 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11007 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11011 unsigned HOST_WIDE_INT cst0;
11013 /* Now we know that arg0 is (C + D) or (C - D) or
11014 -C and arg1 (M) is == (1LL << cst) - 1.
11015 Store C into PMOP[0] and D into PMOP[1]. */
11016 pmop[0] = TREE_OPERAND (arg0, 0);
11018 if (TREE_CODE (arg0) != NEGATE_EXPR)
11020 pmop[1] = TREE_OPERAND (arg0, 1);
11024 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11025 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11029 for (; which >= 0; which--)
11030 switch (TREE_CODE (pmop[which]))
11035 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11038 /* tree_low_cst not used, because we don't care about the upper bits.  */
11040 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11042 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11047 else if (cst0 != 0)
11049 /* If C or D is of the form (A & N) where
11050 (N & M) == M, or of the form (A | N) or
11051 (A ^ N) where (N & M) == 0, replace it with A. */
11052 pmop[which] = TREE_OPERAND (pmop[which], 0);
11055 /* If C or D is a N where (N & M) == 0, it can be
11056 omitted (assumed 0). */
11057 if ((TREE_CODE (arg0) == PLUS_EXPR
11058 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11059 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11060 pmop[which] = NULL;
11066 /* Only build anything new if we optimized one or both arguments above.  */
11068 if (pmop[0] != TREE_OPERAND (arg0, 0)
11069 || (TREE_CODE (arg0) != NEGATE_EXPR
11070 && pmop[1] != TREE_OPERAND (arg0, 1)))
11072 tree utype = TREE_TYPE (arg0);
11073 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11075 /* Perform the operations in a type that has defined
11076 overflow behavior. */
11077 utype = unsigned_type_for (TREE_TYPE (arg0));
11078 if (pmop[0] != NULL)
11079 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11080 if (pmop[1] != NULL)
11081 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11084 if (TREE_CODE (arg0) == NEGATE_EXPR)
11085 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11086 else if (TREE_CODE (arg0) == PLUS_EXPR)
11088 if (pmop[0] != NULL && pmop[1] != NULL)
11089 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11091 else if (pmop[0] != NULL)
11093 else if (pmop[1] != NULL)
11096 return build_int_cst (type, 0);
11098 else if (pmop[0] == NULL)
11099 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11101 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11103 /* TEM is now the new binary +, - or unary - replacement. */
11104 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11105 fold_convert_loc (loc, utype, arg1));
11106 return fold_convert_loc (loc, type, tem);
11111 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11112 if (t1 != NULL_TREE)
11114 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11115 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11116 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11119 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11121 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11122 && (~TREE_INT_CST_LOW (arg1)
11123 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11125 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11128 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11130 This results in more efficient code for machines without a NOR
11131 instruction. Combine will canonicalize to the first form
11132 which will allow use of NOR instructions provided by the
11133 backend if they exist. */
11134 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11135 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11137 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11138 build2 (BIT_IOR_EXPR, type,
11139 fold_convert_loc (loc, type,
11140 TREE_OPERAND (arg0, 0)),
11141 fold_convert_loc (loc, type,
11142 TREE_OPERAND (arg1, 0))));
11145 /* If arg0 is derived from the address of an object or function, we may
11146 be able to fold this expression using the object or function's alignment.  */
11148 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11150 unsigned HOST_WIDE_INT modulus, residue;
11151 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11153 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11154 integer_onep (arg1));
11156 /* This works because modulus is a power of 2. If this weren't the
11157 case, we'd have to replace it by its greatest power-of-2
11158 divisor: modulus & -modulus. */
11160 return build_int_cst (type, residue & low);
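/* For illustration (hypothetical object): if arg0 is the address of a
   variable known to be 16-byte aligned, the call above reports
   modulus == 16 and residue == 0, so an expression such as
   ((uintptr_t) &obj & 7) folds to the constant 0.  */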
11163 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11164 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11165 if the new mask might be further optimized. */
11166 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11167 || TREE_CODE (arg0) == RSHIFT_EXPR)
11168 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11169 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11170 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11171 < TYPE_PRECISION (TREE_TYPE (arg0))
11172 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11173 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11175 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11176 unsigned HOST_WIDE_INT mask
11177 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11178 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11179 tree shift_type = TREE_TYPE (arg0);
11181 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11182 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11183 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11184 && TYPE_PRECISION (TREE_TYPE (arg0))
11185 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11187 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11188 tree arg00 = TREE_OPERAND (arg0, 0);
11189 /* See if more bits can be proven as zero because of zero extension.  */
11191 if (TREE_CODE (arg00) == NOP_EXPR
11192 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11194 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11195 if (TYPE_PRECISION (inner_type)
11196 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11197 && TYPE_PRECISION (inner_type) < prec)
11199 prec = TYPE_PRECISION (inner_type);
11200 /* See if we can shorten the right shift. */
11202 shift_type = inner_type;
11205 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11206 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11207 zerobits <<= prec - shiftc;
11208 /* For an arithmetic shift, if the sign bit could be set, zerobits
11209 may actually contain sign bits, so no transformation is
11210 possible unless MASK masks them all away. In that
11211 case the shift needs to be converted into a logical shift.
11212 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11213 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11215 if ((mask & zerobits) == 0)
11216 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11222 /* ((X << 16) & 0xff00) is (X, 0). */
11223 if ((mask & zerobits) == mask)
11224 return omit_one_operand_loc (loc, type,
11225 build_int_cst (type, 0), arg0);
11227 newmask = mask | zerobits;
11228 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11232 /* Only do the transformation if NEWMASK is some integer mode's mask.  */
11234 for (prec = BITS_PER_UNIT;
11235 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11236 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11238 if (prec < HOST_BITS_PER_WIDE_INT
11239 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11243 if (shift_type != TREE_TYPE (arg0))
11245 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11246 fold_convert_loc (loc, shift_type,
11247 TREE_OPERAND (arg0, 0)),
11248 TREE_OPERAND (arg0, 1));
11249 tem = fold_convert_loc (loc, type, tem);
11253 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11254 if (!tree_int_cst_equal (newmaskt, arg1))
11255 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11263 /* Don't touch a floating-point divide by zero unless the mode
11264 of the constant can represent infinity. */
11265 if (TREE_CODE (arg1) == REAL_CST
11266 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11267 && real_zerop (arg1))
11270 /* Optimize A / A to 1.0 if we don't care about
11271 NaNs or Infinities. Skip the transformation
11272 for non-real operands. */
11273 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11274 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11275 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11276 && operand_equal_p (arg0, arg1, 0))
11278 tree r = build_real (TREE_TYPE (arg0), dconst1);
11280 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11283 /* The complex version of the above A / A optimization. */
11284 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11285 && operand_equal_p (arg0, arg1, 0))
11287 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11288 if (! HONOR_NANS (TYPE_MODE (elem_type))
11289 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11291 tree r = build_real (elem_type, dconst1);
11292 /* omit_two_operands will call fold_convert for us. */
11293 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11297 /* (-A) / (-B) -> A / B */
11298 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11299 return fold_build2_loc (loc, RDIV_EXPR, type,
11300 TREE_OPERAND (arg0, 0),
11301 negate_expr (arg1));
11302 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11303 return fold_build2_loc (loc, RDIV_EXPR, type,
11304 negate_expr (arg0),
11305 TREE_OPERAND (arg1, 0));
11307 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11308 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11309 && real_onep (arg1))
11310 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11312 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11313 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11314 && real_minus_onep (arg1))
11315 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11316 negate_expr (arg0)));
11318 /* If ARG1 is a constant, we can convert this to a multiply by the
11319 reciprocal. This does not have the same rounding properties,
11320 so only do this if -freciprocal-math. We can actually
11321 always safely do it if ARG1 is a power of two, but it's hard to
11322 tell if it is or not in a portable manner. */
11323 if (TREE_CODE (arg1) == REAL_CST)
11325 if (flag_reciprocal_math
11326 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11328 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11329 /* Find the reciprocal if optimizing and the result is exact. */
11333 r = TREE_REAL_CST (arg1);
11334 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11336 tem = build_real (type, r);
11337 return fold_build2_loc (loc, MULT_EXPR, type,
11338 fold_convert_loc (loc, type, arg0), tem);
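/* Two illustrative cases of the above (hypothetical operands):

     x / 2.0   ==>   x * 0.5         whenever optimizing, since 0.5 is
                                      the exact inverse of 2.0;
     x / 3.0   ==>   x * (1.0/3.0)   only under -freciprocal-math,
                                      because 1.0/3.0 must be rounded.  */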
11342 /* Convert A/B/C to A/(B*C). */
11343 if (flag_reciprocal_math
11344 && TREE_CODE (arg0) == RDIV_EXPR)
11345 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11346 fold_build2_loc (loc, MULT_EXPR, type,
11347 TREE_OPERAND (arg0, 1), arg1));
11349 /* Convert A/(B/C) to (A/B)*C. */
11350 if (flag_reciprocal_math
11351 && TREE_CODE (arg1) == RDIV_EXPR)
11352 return fold_build2_loc (loc, MULT_EXPR, type,
11353 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11354 TREE_OPERAND (arg1, 0)),
11355 TREE_OPERAND (arg1, 1));
11357 /* Convert C1/(X*C2) into (C1/C2)/X. */
11358 if (flag_reciprocal_math
11359 && TREE_CODE (arg1) == MULT_EXPR
11360 && TREE_CODE (arg0) == REAL_CST
11361 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11363 tree tem = const_binop (RDIV_EXPR, arg0,
11364 TREE_OPERAND (arg1, 1));
11366 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11367 TREE_OPERAND (arg1, 0));
11370 if (flag_unsafe_math_optimizations)
11372 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11373 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11375 /* Optimize sin(x)/cos(x) as tan(x). */
11376 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11377 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11378 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11379 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11380 CALL_EXPR_ARG (arg1, 0), 0))
11382 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11384 if (tanfn != NULL_TREE)
11385 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11388 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11389 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11390 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11391 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11392 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11393 CALL_EXPR_ARG (arg1, 0), 0))
11395 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11397 if (tanfn != NULL_TREE)
11399 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11400 CALL_EXPR_ARG (arg0, 0));
11401 return fold_build2_loc (loc, RDIV_EXPR, type,
11402 build_real (type, dconst1), tmp);
11406 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11407 NaNs or Infinities. */
11408 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11409 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11410 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11412 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11413 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11415 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11416 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11417 && operand_equal_p (arg00, arg01, 0))
11419 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11421 if (cosfn != NULL_TREE)
11422 return build_call_expr_loc (loc, cosfn, 1, arg00);
11426 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11427 NaNs or Infinities. */
11428 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11429 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11430 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11432 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11433 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11435 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11436 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11437 && operand_equal_p (arg00, arg01, 0))
11439 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11441 if (cosfn != NULL_TREE)
11443 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11444 return fold_build2_loc (loc, RDIV_EXPR, type,
11445 build_real (type, dconst1),
11451 /* Optimize pow(x,c)/x as pow(x,c-1). */
11452 if (fcode0 == BUILT_IN_POW
11453 || fcode0 == BUILT_IN_POWF
11454 || fcode0 == BUILT_IN_POWL)
11456 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11457 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11458 if (TREE_CODE (arg01) == REAL_CST
11459 && !TREE_OVERFLOW (arg01)
11460 && operand_equal_p (arg1, arg00, 0))
11462 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11466 c = TREE_REAL_CST (arg01);
11467 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11468 arg = build_real (type, c);
11469 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11473 /* Optimize a/root(b/c) into a*root(c/b). */
11474 if (BUILTIN_ROOT_P (fcode1))
11476 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11478 if (TREE_CODE (rootarg) == RDIV_EXPR)
11480 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11481 tree b = TREE_OPERAND (rootarg, 0);
11482 tree c = TREE_OPERAND (rootarg, 1);
11484 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11486 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11487 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11491 /* Optimize x/expN(y) into x*expN(-y). */
11492 if (BUILTIN_EXPONENT_P (fcode1))
11494 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11495 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11496 arg1 = build_call_expr_loc (loc,
11498 fold_convert_loc (loc, type, arg));
11499 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11502 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11503 if (fcode1 == BUILT_IN_POW
11504 || fcode1 == BUILT_IN_POWF
11505 || fcode1 == BUILT_IN_POWL)
11507 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11508 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11509 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11510 tree neg11 = fold_convert_loc (loc, type,
11511 negate_expr (arg11));
11512 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11513 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11518 case TRUNC_DIV_EXPR:
11519 /* Optimize (X & (-A)) / A where A is a power of 2, to X >> log2(A).  */
11521 if (TREE_CODE (arg0) == BIT_AND_EXPR
11522 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11523 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11525 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11526 arg1, TREE_OPERAND (arg0, 1));
11527 if (sum && integer_zerop (sum)) {
11528 unsigned long pow2;
11530 if (TREE_INT_CST_LOW (arg1))
11531 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11533 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11534 + HOST_BITS_PER_WIDE_INT;
11536 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11537 TREE_OPERAND (arg0, 0),
11538 build_int_cst (integer_type_node, pow2));
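/* Sketch of the fold above with A == 8 (hypothetical operand x):

     (x & -8) / 8   ==>   x >> 3

   The mask guarantees the dividend is a multiple of 8, so the signed
   division is exact and an arithmetic right shift gives the same
   result, even for negative x.  */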
11544 case FLOOR_DIV_EXPR:
11545 /* Simplify A / (B << N) where A and B are positive and B is
11546 a power of 2, to A >> (N + log2(B)). */
11547 strict_overflow_p = false;
11548 if (TREE_CODE (arg1) == LSHIFT_EXPR
11549 && (TYPE_UNSIGNED (type)
11550 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11552 tree sval = TREE_OPERAND (arg1, 0);
11553 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11555 tree sh_cnt = TREE_OPERAND (arg1, 1);
11556 unsigned long pow2;
11558 if (TREE_INT_CST_LOW (sval))
11559 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11561 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11562 + HOST_BITS_PER_WIDE_INT;
11564 if (strict_overflow_p)
11565 fold_overflow_warning (("assuming signed overflow does not "
11566 "occur when simplifying A / (B << N)"),
11567 WARN_STRICT_OVERFLOW_MISC);
11569 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11571 build_int_cst (TREE_TYPE (sh_cnt),
11573 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11574 fold_convert_loc (loc, type, arg0), sh_cnt);
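/* E.g. with unsigned a (hypothetical operands):

     a / (4u << n)   ==>   a >> (n + 2)

   since log2 (4) == 2; the strict-overflow warning above covers the
   signed case, where nonnegativity of the dividend had to be assumed.  */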
11578 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11579 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11580 if (INTEGRAL_TYPE_P (type)
11581 && TYPE_UNSIGNED (type)
11582 && code == FLOOR_DIV_EXPR)
11583 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11587 case ROUND_DIV_EXPR:
11588 case CEIL_DIV_EXPR:
11589 case EXACT_DIV_EXPR:
11590 if (integer_onep (arg1))
11591 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11592 if (integer_zerop (arg1))
11594 /* X / -1 is -X. */
11595 if (!TYPE_UNSIGNED (type)
11596 && TREE_CODE (arg1) == INTEGER_CST
11597 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11598 && TREE_INT_CST_HIGH (arg1) == -1)
11599 return fold_convert_loc (loc, type, negate_expr (arg0));
11601 /* Convert -A / -B to A / B when the type is signed and overflow is undefined.  */
11603 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11604 && TREE_CODE (arg0) == NEGATE_EXPR
11605 && negate_expr_p (arg1))
11607 if (INTEGRAL_TYPE_P (type))
11608 fold_overflow_warning (("assuming signed overflow does not occur "
11609 "when distributing negation across "
11611 WARN_STRICT_OVERFLOW_MISC);
11612 return fold_build2_loc (loc, code, type,
11613 fold_convert_loc (loc, type,
11614 TREE_OPERAND (arg0, 0)),
11615 fold_convert_loc (loc, type,
11616 negate_expr (arg1)));
11618 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11619 && TREE_CODE (arg1) == NEGATE_EXPR
11620 && negate_expr_p (arg0))
11622 if (INTEGRAL_TYPE_P (type))
11623 fold_overflow_warning (("assuming signed overflow does not occur "
11624 "when distributing negation across "
11626 WARN_STRICT_OVERFLOW_MISC);
11627 return fold_build2_loc (loc, code, type,
11628 fold_convert_loc (loc, type,
11629 negate_expr (arg0)),
11630 fold_convert_loc (loc, type,
11631 TREE_OPERAND (arg1, 0)));
11634 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11635 operation, EXACT_DIV_EXPR.
11637 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11638 At one time others generated faster code, it's not clear if they do
11639 after the last round of changes to the DIV code in expmed.c. */
11640 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11641 && multiple_of_p (type, arg0, arg1))
11642 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11644 strict_overflow_p = false;
11645 if (TREE_CODE (arg1) == INTEGER_CST
11646 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11647 &strict_overflow_p)))
11649 if (strict_overflow_p)
11650 fold_overflow_warning (("assuming signed overflow does not occur "
11651 "when simplifying division"),
11652 WARN_STRICT_OVERFLOW_MISC);
11653 return fold_convert_loc (loc, type, tem);
11658 case CEIL_MOD_EXPR:
11659 case FLOOR_MOD_EXPR:
11660 case ROUND_MOD_EXPR:
11661 case TRUNC_MOD_EXPR:
11662 /* X % 1 is always zero, but be sure to preserve any side effects in X.  */
11664 if (integer_onep (arg1))
11665 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11667 /* X % 0, return X % 0 unchanged so that we can get the
11668 proper warnings and errors. */
11669 if (integer_zerop (arg1))
11672 /* 0 % X is always zero, but be sure to preserve any side
11673 effects in X. Place this after checking for X == 0. */
11674 if (integer_zerop (arg0))
11675 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11677 /* X % -1 is zero. */
11678 if (!TYPE_UNSIGNED (type)
11679 && TREE_CODE (arg1) == INTEGER_CST
11680 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11681 && TREE_INT_CST_HIGH (arg1) == -1)
11682 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11684 /* X % -C is the same as X % C. */
11685 if (code == TRUNC_MOD_EXPR
11686 && !TYPE_UNSIGNED (type)
11687 && TREE_CODE (arg1) == INTEGER_CST
11688 && !TREE_OVERFLOW (arg1)
11689 && TREE_INT_CST_HIGH (arg1) < 0
11690 && !TYPE_OVERFLOW_TRAPS (type)
11691 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11692 && !sign_bit_p (arg1, arg1))
11693 return fold_build2_loc (loc, code, type,
11694 fold_convert_loc (loc, type, arg0),
11695 fold_convert_loc (loc, type,
11696 negate_expr (arg1)));
11698 /* X % -Y is the same as X % Y. */
11699 if (code == TRUNC_MOD_EXPR
11700 && !TYPE_UNSIGNED (type)
11701 && TREE_CODE (arg1) == NEGATE_EXPR
11702 && !TYPE_OVERFLOW_TRAPS (type))
11703 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11704 fold_convert_loc (loc, type,
11705 TREE_OPERAND (arg1, 0)));
11707 strict_overflow_p = false;
11708 if (TREE_CODE (arg1) == INTEGER_CST
11709 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11710 &strict_overflow_p)))
11712 if (strict_overflow_p)
11713 fold_overflow_warning (("assuming signed overflow does not occur "
11714 "when simplifying modulus"),
11715 WARN_STRICT_OVERFLOW_MISC);
11716 return fold_convert_loc (loc, type, tem);
11719 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11720 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11721 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11722 && (TYPE_UNSIGNED (type)
11723 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11726 /* Also optimize A % (C << N) where C is a power of 2,
11727 to A & ((C << N) - 1). */
11728 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11729 c = TREE_OPERAND (arg1, 0);
11731 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11734 tree mask = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11735 build_int_cst (TREE_TYPE (arg1), 1));
11736 if (strict_overflow_p)
11737 fold_overflow_warning (("assuming signed overflow does not "
11738 "occur when simplifying "
11739 "X % (power of two)"),
11740 WARN_STRICT_OVERFLOW_MISC);
11741 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11742 fold_convert_loc (loc, type, arg0),
11743 fold_convert_loc (loc, type, mask));
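	      /* For example, with unsigned x, "x % 8" becomes "x & 7",
		 and "x % (4 << n)" becomes "x & ((4 << n) - 1)".  */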
11751 if (integer_all_onesp (arg0))
11752 return omit_one_operand_loc (loc, type, arg0, arg1);
11756 /* Optimize -1 >> x for arithmetic right shifts. */
11757 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11758 && tree_expr_nonnegative_p (arg1))
11759 return omit_one_operand_loc (loc, type, arg0, arg1);
11760 /* ... fall through ... */
11764 if (integer_zerop (arg1))
11765 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11766 if (integer_zerop (arg0))
11767 return omit_one_operand_loc (loc, type, arg0, arg1);
11769 /* Since negative shift count is not well-defined,
11770 don't try to compute it in the compiler. */
11771 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11774 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11775 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11776 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11777 && host_integerp (TREE_OPERAND (arg0, 1), false)
11778 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11780 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11781 + TREE_INT_CST_LOW (arg1));
11783 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11784 being well defined. */
11785 if (low >= TYPE_PRECISION (type))
11787 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11788 low = low % TYPE_PRECISION (type);
11789 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11790 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11791 TREE_OPERAND (arg0, 0));
11793 low = TYPE_PRECISION (type) - 1;
11796 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11797 build_int_cst (type, low));
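	  /* For example, "(x << 3) << 5" becomes "x << 8".  If the counts
	     add up to the precision or more, rotate counts wrap around,
	     unsigned or left shifts become 0, and signed right shifts are
	     clamped to precision - 1.  */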
11800 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11801 into x & ((unsigned)-1 >> c) for unsigned types. */
11802 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11803 || (TYPE_UNSIGNED (type)
11804 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11805 && host_integerp (arg1, false)
11806 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11807 && host_integerp (TREE_OPERAND (arg0, 1), false)
11808 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11810 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11811 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11817 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11819 lshift = build_int_cst (type, -1);
11820 lshift = int_const_binop (code, lshift, arg1);
11822 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
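	  /* For example, for a 32-bit unsigned x, "(x >> 4) << 4" becomes
	     "x & 0xfffffff0" and "(x << 4) >> 4" becomes "x & 0x0fffffff",
	     assuming both shift counts are the same constant.  */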
11826 /* Rewrite an LROTATE_EXPR by a constant into an
11827 RROTATE_EXPR by a new constant. */
11828 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11830 tree tem = build_int_cst (TREE_TYPE (arg1),
11831 TYPE_PRECISION (type));
11832 tem = const_binop (MINUS_EXPR, tem, arg1);
11833 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
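	  /* For example, in a 32-bit type a rotate left by 5 becomes a
	     rotate right by 27.  */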
11836 /* If we have a rotate of a bit operation with the rotate count and
11837 the second operand of the bit operation both constant,
11838 permute the two operations. */
11839 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11840 && (TREE_CODE (arg0) == BIT_AND_EXPR
11841 || TREE_CODE (arg0) == BIT_IOR_EXPR
11842 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11843 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11844 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11845 fold_build2_loc (loc, code, type,
11846 TREE_OPERAND (arg0, 0), arg1),
11847 fold_build2_loc (loc, code, type,
11848 TREE_OPERAND (arg0, 1), arg1));
11850 /* Two consecutive rotates adding up to the precision of the
11851 type can be ignored. */
11852 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11853 && TREE_CODE (arg0) == RROTATE_EXPR
11854 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11855 && TREE_INT_CST_HIGH (arg1) == 0
11856 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11857 && ((TREE_INT_CST_LOW (arg1)
11858 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11859 == (unsigned int) TYPE_PRECISION (type)))
11860 return TREE_OPERAND (arg0, 0);
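	  /* For example, in a 32-bit type "(x r>> 10) r>> 22" reduces to
	     plain x.  */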
11862 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11863 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11864 if the latter can be further optimized. */
11865 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11866 && TREE_CODE (arg0) == BIT_AND_EXPR
11867 && TREE_CODE (arg1) == INTEGER_CST
11868 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11870 tree mask = fold_build2_loc (loc, code, type,
11871 fold_convert_loc (loc, type,
11872 TREE_OPERAND (arg0, 1)),
11874 tree shift = fold_build2_loc (loc, code, type,
11875 fold_convert_loc (loc, type,
11876 TREE_OPERAND (arg0, 0)),
11878 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11886 if (operand_equal_p (arg0, arg1, 0))
11887 return omit_one_operand_loc (loc, type, arg0, arg1);
11888 if (INTEGRAL_TYPE_P (type)
11889 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11890 return omit_one_operand_loc (loc, type, arg1, arg0);
11891 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11897 if (operand_equal_p (arg0, arg1, 0))
11898 return omit_one_operand_loc (loc, type, arg0, arg1);
11899 if (INTEGRAL_TYPE_P (type)
11900 && TYPE_MAX_VALUE (type)
11901 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11902 return omit_one_operand_loc (loc, type, arg1, arg0);
11903 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11908 case TRUTH_ANDIF_EXPR:
11909 /* Note that the operands of this must be ints
11910 and their values must be 0 or 1.
11911 ("true" is a fixed value perhaps depending on the language.) */
11912 /* If first arg is constant zero, return it. */
11913 if (integer_zerop (arg0))
11914 return fold_convert_loc (loc, type, arg0);
11915 case TRUTH_AND_EXPR:
11916 /* If either arg is constant true, drop it. */
11917 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11918 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11919 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11920 /* Preserve sequence points. */
11921 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11922 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11923 /* If second arg is constant zero, result is zero, but first arg
11924 must be evaluated. */
11925 if (integer_zerop (arg1))
11926 return omit_one_operand_loc (loc, type, arg1, arg0);
11927 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11928 case will be handled here. */
11929 if (integer_zerop (arg0))
11930 return omit_one_operand_loc (loc, type, arg0, arg1);
11932 /* !X && X is always false. */
11933 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11934 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11935 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11936 /* X && !X is always false. */
11937 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11938 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11939 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11941 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11942 means A >= Y && A != MAX, but in this case we know that A < X <= MAX.  */
11945 if (!TREE_SIDE_EFFECTS (arg0)
11946 && !TREE_SIDE_EFFECTS (arg1))
11948 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11949 if (tem && !operand_equal_p (tem, arg0, 0))
11950 return fold_build2_loc (loc, code, type, tem, arg1);
11952 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11953 if (tem && !operand_equal_p (tem, arg1, 0))
11954 return fold_build2_loc (loc, code, type, arg0, tem);
11958 /* We only do these simplifications if we are optimizing. */
11962 /* Check for things like (A || B) && (A || C). We can convert this
11963 to A || (B && C). Note that either operator can be any of the four
11964 truth and/or operations and the transformation will still be
11965 valid. Also note that we only care about order for the
11966 ANDIF and ORIF operators. If B contains side effects, this
11967 might change the truth-value of A. */
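	  /* For example, "(a || b) && (a || c)" becomes "a || (b && c)"
	     when b is free of side effects.  */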
11968 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11969 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11970 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11971 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11972 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11973 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11975 tree a00 = TREE_OPERAND (arg0, 0);
11976 tree a01 = TREE_OPERAND (arg0, 1);
11977 tree a10 = TREE_OPERAND (arg1, 0);
11978 tree a11 = TREE_OPERAND (arg1, 1);
11979 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11980 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11981 && (code == TRUTH_AND_EXPR
11982 || code == TRUTH_OR_EXPR));
11984 if (operand_equal_p (a00, a10, 0))
11985 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11986 fold_build2_loc (loc, code, type, a01, a11));
11987 else if (commutative && operand_equal_p (a00, a11, 0))
11988 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11989 fold_build2_loc (loc, code, type, a01, a10));
11990 else if (commutative && operand_equal_p (a01, a10, 0))
11991 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
11992 fold_build2_loc (loc, code, type, a00, a11));
11994 /* This case is tricky because we must either have commutative
11995 operators or else A10 must not have side-effects. */
11997 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11998 && operand_equal_p (a01, a11, 0))
11999 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12000 fold_build2_loc (loc, code, type, a00, a10), a01);
12004 /* See if we can build a range comparison. */
12005 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
12008 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
12009 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
12011 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
12013 return fold_build2_loc (loc, code, type, tem, arg1);
12016 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
12017 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
12019 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
12021 return fold_build2_loc (loc, code, type, arg0, tem);
12024 /* Check for the possibility of merging component references. If our
12025 lhs is another similar operation, try to merge its rhs with our
12026 rhs. Then try to merge our lhs and rhs. */
12027 if (TREE_CODE (arg0) == code
12028 && 0 != (tem = fold_truthop (loc, code, type,
12029 TREE_OPERAND (arg0, 1), arg1)))
12030 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12032 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12037 case TRUTH_ORIF_EXPR:
12038 /* Note that the operands of this must be ints
12039 and their values must be 0 or true.
12040 ("true" is a fixed value perhaps depending on the language.) */
12041 /* If first arg is constant true, return it. */
12042 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12043 return fold_convert_loc (loc, type, arg0);
12044 case TRUTH_OR_EXPR:
12045 /* If either arg is constant zero, drop it. */
12046 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12047 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12048 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12049 /* Preserve sequence points. */
12050 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12051 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12052 /* If second arg is constant true, result is true, but we must
12053 evaluate first arg. */
12054 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12055 return omit_one_operand_loc (loc, type, arg1, arg0);
12056 /* Likewise for first arg, but note this only occurs here for TRUTH_OR_EXPR.  */
12058 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12059 return omit_one_operand_loc (loc, type, arg0, arg1);
12061 /* !X || X is always true. */
12062 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12063 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12064 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12065 /* X || !X is always true. */
12066 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12067 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12068 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12070 /* (X && !Y) || (!X && Y) is X ^ Y */
12071 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12072 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12074 tree a0, a1, l0, l1, n0, n1;
12076 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12077 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12079 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12080 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12082 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12083 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12085 if ((operand_equal_p (n0, a0, 0)
12086 && operand_equal_p (n1, a1, 0))
12087 || (operand_equal_p (n0, a1, 0)
12088 && operand_equal_p (n1, a0, 0)))
12089 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
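	  /* For example, "(p && !q) || (!p && q)" is recognized here and
	     folded to "p ^ q".  */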
12093 case TRUTH_XOR_EXPR:
12094 /* If the second arg is constant zero, drop it. */
12095 if (integer_zerop (arg1))
12096 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12097 /* If the second arg is constant true, this is a logical inversion. */
12098 if (integer_onep (arg1))
12100 /* Only call invert_truthvalue if operand is a truth value. */
12101 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12102 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12104 else tem = invert_truthvalue_loc (loc, arg0);
12105 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12107 /* Identical arguments cancel to zero. */
12108 if (operand_equal_p (arg0, arg1, 0))
12109 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12111 /* !X ^ X is always true. */
12112 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12113 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12114 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12116 /* X ^ !X is always true. */
12117 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12118 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12119 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12128 tem = fold_comparison (loc, code, type, op0, op1);
12129 if (tem != NULL_TREE)
12132 /* bool_var != 0 becomes bool_var. */
12133 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12134 && code == NE_EXPR)
12135 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12137 /* bool_var == 1 becomes bool_var. */
12138 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12139 && code == EQ_EXPR)
12140 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12142 /* bool_var != 1 becomes !bool_var. */
12143 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12144 && code == NE_EXPR)
12145 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12146 fold_convert_loc (loc, type, arg0));
12148 /* bool_var == 0 becomes !bool_var. */
12149 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12150 && code == EQ_EXPR)
12151 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12152 fold_convert_loc (loc, type, arg0));
12154 /* !exp != 0 becomes !exp */
12155 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12156 && code == NE_EXPR)
12157 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12159 /* If this is an equality comparison of the address of two non-weak,
12160 unaliased symbols neither of which are extern (since we do not
12161 have access to attributes for externs), then we know the result. */
12162 if (TREE_CODE (arg0) == ADDR_EXPR
12163 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12164 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12165 && ! lookup_attribute ("alias",
12166 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12167 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12168 && TREE_CODE (arg1) == ADDR_EXPR
12169 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12170 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12171 && ! lookup_attribute ("alias",
12172 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12173 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12175 /* We know that we're looking at the address of two
12176 non-weak, unaliased, static _DECL nodes.
12178 It is both wasteful and incorrect to call operand_equal_p
12179 to compare the two ADDR_EXPR nodes. It is wasteful in that
12180 all we need to do is test pointer equality for the arguments
12181 to the two ADDR_EXPR nodes. It is incorrect to use
12182 operand_equal_p as that function is NOT equivalent to a
12183 C equality test. It can in fact return false for two
12184 objects which would test as equal using the C equality operator.  */
12186 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12187 return constant_boolean_node (equal
12188 ? code == EQ_EXPR : code != EQ_EXPR, type);
12192 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12193 a MINUS_EXPR of a constant, we can convert it into a comparison with
12194 a revised constant as long as no overflow occurs. */
12195 if (TREE_CODE (arg1) == INTEGER_CST
12196 && (TREE_CODE (arg0) == PLUS_EXPR
12197 || TREE_CODE (arg0) == MINUS_EXPR)
12198 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12199 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12200 ? MINUS_EXPR : PLUS_EXPR,
12201 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12203 TREE_OPERAND (arg0, 1)))
12204 && !TREE_OVERFLOW (tem))
12205 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12207 /* Similarly for a NEGATE_EXPR. */
12208 if (TREE_CODE (arg0) == NEGATE_EXPR
12209 && TREE_CODE (arg1) == INTEGER_CST
12210 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0), arg1)))
12212 && TREE_CODE (tem) == INTEGER_CST
12213 && !TREE_OVERFLOW (tem))
12214 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12216 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12217 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12218 && TREE_CODE (arg1) == INTEGER_CST
12219 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12220 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12221 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12222 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12225 TREE_OPERAND (arg0, 1)));
12227 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12228 if ((TREE_CODE (arg0) == PLUS_EXPR
12229 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12230 || TREE_CODE (arg0) == MINUS_EXPR)
12231 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0, 0)), arg1, 0)
12234 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12235 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12237 tree val = TREE_OPERAND (arg0, 1);
12238 return omit_two_operands_loc (loc, type,
12239 fold_build2_loc (loc, code, type,
12241 build_int_cst (TREE_TYPE (val),
12243 TREE_OPERAND (arg0, 0), arg1);
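	  /* For example, "x + y == x" becomes "y == 0", still evaluating
	     x for its side effects.  */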
12246 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12247 if (TREE_CODE (arg0) == MINUS_EXPR
12248 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12249 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0, 1)), arg1, 0)
12252 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12254 return omit_two_operands_loc (loc, type,
12256 code == NE_EXPR ? boolean_true_node : boolean_false_node,
12257 TREE_OPERAND (arg0, 1), arg1);
12260 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12261 for !=. Don't do this for ordered comparisons due to overflow. */
12262 if (TREE_CODE (arg0) == MINUS_EXPR
12263 && integer_zerop (arg1))
12264 return fold_build2_loc (loc, code, type,
12265 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12267 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12268 if (TREE_CODE (arg0) == ABS_EXPR
12269 && (integer_zerop (arg1) || real_zerop (arg1)))
12270 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12272 /* If this is an EQ or NE comparison with zero and ARG0 is
12273 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12274 two operations, but the latter can be done in one less insn
12275 on machines that have only two-operand insns or on which a
12276 constant cannot be the first operand. */
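	  /* For example, "((1 << n) & flags) != 0" becomes
	     "((flags >> n) & 1) != 0".  */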
12277 if (TREE_CODE (arg0) == BIT_AND_EXPR
12278 && integer_zerop (arg1))
12280 tree arg00 = TREE_OPERAND (arg0, 0);
12281 tree arg01 = TREE_OPERAND (arg0, 1);
12282 if (TREE_CODE (arg00) == LSHIFT_EXPR
12283 && integer_onep (TREE_OPERAND (arg00, 0)))
12285 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12286 arg01, TREE_OPERAND (arg00, 1));
12287 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12288 build_int_cst (TREE_TYPE (arg0), 1));
12289 return fold_build2_loc (loc, code, type,
12290 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12293 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12294 && integer_onep (TREE_OPERAND (arg01, 0)))
12296 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12297 arg00, TREE_OPERAND (arg01, 1));
12298 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12299 build_int_cst (TREE_TYPE (arg0), 1));
12300 return fold_build2_loc (loc, code, type,
12301 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12306 /* If this is an NE or EQ comparison of zero against the result of a
12307 signed MOD operation whose second operand is a power of 2, make
12308 the MOD operation unsigned since it is simpler and equivalent. */
12309 if (integer_zerop (arg1)
12310 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12311 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12312 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12313 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12314 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12315 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12317 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12318 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12319 fold_convert_loc (loc, newtype,
12320 TREE_OPERAND (arg0, 0)),
12321 fold_convert_loc (loc, newtype,
12322 TREE_OPERAND (arg0, 1)));
12324 return fold_build2_loc (loc, code, type, newmod,
12325 fold_convert_loc (loc, newtype, arg1));
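	  /* For example, with signed x, "x % 4 == 0" becomes
	     "(unsigned) x % 4 == 0", which can then be reduced to a simple
	     mask test.  */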
12328 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12329 C1 is a valid shift constant, and C2 is a power of two, i.e. a single bit.  */
12331 if (TREE_CODE (arg0) == BIT_AND_EXPR
12332 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12333 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)) == INTEGER_CST
12335 && integer_pow2p (TREE_OPERAND (arg0, 1))
12336 && integer_zerop (arg1))
12338 tree itype = TREE_TYPE (arg0);
12339 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12340 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12342 /* Check for a valid shift count. */
12343 if (TREE_INT_CST_HIGH (arg001) == 0
12344 && TREE_INT_CST_LOW (arg001) < prec)
12346 tree arg01 = TREE_OPERAND (arg0, 1);
12347 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12348 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12349 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12350 can be rewritten as (X & (C2 << C1)) != 0. */
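	      /* For example, "((x >> 3) & 4) != 0" becomes
		 "(x & 32) != 0" when the shifted constant still fits in
		 the precision.  */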
12351 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12353 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12354 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12355 return fold_build2_loc (loc, code, type, tem,
12356 fold_convert_loc (loc, itype, arg1));
12358 /* Otherwise, for signed (arithmetic) shifts,
12359 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12360 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12361 else if (!TYPE_UNSIGNED (itype))
12362 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12363 arg000, build_int_cst (itype, 0));
12364 /* Otherwise, for unsigned (logical) shifts,
12365 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12366 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12368 return omit_one_operand_loc (loc, type,
12369 code == EQ_EXPR ? integer_one_node
12370 : integer_zero_node, arg000);
12375 /* If this is an NE comparison of zero with an AND of one, remove the
12376 comparison since the AND will give the correct value. */
12377 if (code == NE_EXPR
12378 && integer_zerop (arg1)
12379 && TREE_CODE (arg0) == BIT_AND_EXPR
12380 && integer_onep (TREE_OPERAND (arg0, 1)))
12381 return fold_convert_loc (loc, type, arg0);
12383 /* If we have (A & C) == C where C is a power of 2, convert this into
12384 (A & C) != 0. Similarly for NE_EXPR. */
12385 if (TREE_CODE (arg0) == BIT_AND_EXPR
12386 && integer_pow2p (TREE_OPERAND (arg0, 1))
12387 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12388 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12389 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12390 integer_zero_node));
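	  /* For example, "(x & 8) == 8" becomes "(x & 8) != 0", and
	     "(x & 8) != 8" becomes "(x & 8) == 0".  */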
12392 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12393 bit, then fold the expression into A < 0 or A >= 0. */
12394 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12398 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12399 Similarly for NE_EXPR. */
12400 if (TREE_CODE (arg0) == BIT_AND_EXPR
12401 && TREE_CODE (arg1) == INTEGER_CST
12402 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12404 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12405 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12406 TREE_OPERAND (arg0, 1));
12408 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12409 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12411 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12412 if (integer_nonzerop (dandnotc))
12413 return omit_one_operand_loc (loc, type, rslt, arg0);
12416 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12417 Similarly for NE_EXPR. */
12418 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12419 && TREE_CODE (arg1) == INTEGER_CST
12420 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12422 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12424 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12425 TREE_OPERAND (arg0, 1),
12426 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12427 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12428 if (integer_nonzerop (candnotd))
12429 return omit_one_operand_loc (loc, type, rslt, arg0);
12432 /* If this is a comparison of a field, we may be able to simplify it. */
12433 if ((TREE_CODE (arg0) == COMPONENT_REF
12434 || TREE_CODE (arg0) == BIT_FIELD_REF)
12435 /* Handle the constant case even without -O
12436 to make sure the warnings are given. */
12437 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12439 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12444 /* Optimize comparisons of strlen vs zero to a compare of the
12445 first character of the string vs zero. To wit,
12446 strlen(ptr) == 0 => *ptr == 0
12447 strlen(ptr) != 0 => *ptr != 0
12448 Other cases should reduce to one of these two (or a constant)
12449 due to the return value of strlen being unsigned. */
12450 if (TREE_CODE (arg0) == CALL_EXPR
12451 && integer_zerop (arg1))
12453 tree fndecl = get_callee_fndecl (arg0);
12456 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12457 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12458 && call_expr_nargs (arg0) == 1
12459 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12461 tree iref = build_fold_indirect_ref_loc (loc,
12462 CALL_EXPR_ARG (arg0, 0));
12463 return fold_build2_loc (loc, code, type, iref,
12464 build_int_cst (TREE_TYPE (iref), 0));
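	  /* For example, "strlen (p) == 0" becomes "*p == 0".  */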
12468 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12469 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12470 if (TREE_CODE (arg0) == RSHIFT_EXPR
12471 && integer_zerop (arg1)
12472 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12474 tree arg00 = TREE_OPERAND (arg0, 0);
12475 tree arg01 = TREE_OPERAND (arg0, 1);
12476 tree itype = TREE_TYPE (arg00);
12477 if (TREE_INT_CST_HIGH (arg01) == 0
12478 && TREE_INT_CST_LOW (arg01)
12479 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12481 if (TYPE_UNSIGNED (itype))
12483 itype = signed_type_for (itype);
12484 arg00 = fold_convert_loc (loc, itype, arg00);
12486 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12487 type, arg00, build_int_cst (itype, 0));
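	  /* For example, for a 32-bit int x, "(x >> 31) != 0" becomes
	     "x < 0" and "(x >> 31) == 0" becomes "x >= 0"; an unsigned x
	     is first converted to the corresponding signed type.  */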
12491 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12492 if (integer_zerop (arg1)
12493 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12494 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12495 TREE_OPERAND (arg0, 1));
12497 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12498 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12499 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12500 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12501 build_int_cst (TREE_TYPE (arg0), 0));
12502 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12503 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12504 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12505 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12506 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12507 build_int_cst (TREE_TYPE (arg0), 0));
12509 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12510 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12511 && TREE_CODE (arg1) == INTEGER_CST
12512 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12513 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12514 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12515 TREE_OPERAND (arg0, 1), arg1));
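	  /* For example, "(x ^ 3) == 5" becomes "x == 6".  */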
12517 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12518 (X & C) == 0 when C is a single bit. */
12519 if (TREE_CODE (arg0) == BIT_AND_EXPR
12520 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12521 && integer_zerop (arg1)
12522 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12524 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12525 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12526 TREE_OPERAND (arg0, 1));
12527 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12529 fold_convert_loc (loc, TREE_TYPE (arg0),
12533 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12534 constant C is a power of two, i.e. a single bit. */
12535 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12536 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12537 && integer_zerop (arg1)
12538 && integer_pow2p (TREE_OPERAND (arg0, 1))
12539 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12540 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12542 tree arg00 = TREE_OPERAND (arg0, 0);
12543 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12544 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12547 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12548 when C is a power of two, i.e. a single bit.  */
12549 if (TREE_CODE (arg0) == BIT_AND_EXPR
12550 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12551 && integer_zerop (arg1)
12552 && integer_pow2p (TREE_OPERAND (arg0, 1))
12553 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12554 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12556 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12557 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12558 arg000, TREE_OPERAND (arg0, 1));
12559 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12560 tem, build_int_cst (TREE_TYPE (tem), 0));
12563 if (integer_zerop (arg1)
12564 && tree_expr_nonzero_p (arg0))
12566 tree res = constant_boolean_node (code == NE_EXPR, type);
12567 return omit_one_operand_loc (loc, type, res, arg0);
12570 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12571 if (TREE_CODE (arg0) == NEGATE_EXPR
12572 && TREE_CODE (arg1) == NEGATE_EXPR)
12573 return fold_build2_loc (loc, code, type,
12574 TREE_OPERAND (arg0, 0),
12575 fold_convert_loc (loc, TREE_TYPE (arg0),
12576 TREE_OPERAND (arg1, 0)));
12578 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
12579 if (TREE_CODE (arg0) == BIT_AND_EXPR
12580 && TREE_CODE (arg1) == BIT_AND_EXPR)
12582 tree arg00 = TREE_OPERAND (arg0, 0);
12583 tree arg01 = TREE_OPERAND (arg0, 1);
12584 tree arg10 = TREE_OPERAND (arg1, 0);
12585 tree arg11 = TREE_OPERAND (arg1, 1);
12586 tree itype = TREE_TYPE (arg0);
12588 if (operand_equal_p (arg01, arg11, 0))
12589 return fold_build2_loc (loc, code, type,
12590 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12591 fold_build2_loc (loc,
12592 BIT_XOR_EXPR, itype,
12595 build_int_cst (itype, 0));
12597 if (operand_equal_p (arg01, arg10, 0))
12598 return fold_build2_loc (loc, code, type,
12599 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12600 fold_build2_loc (loc,
12601 BIT_XOR_EXPR, itype,
12604 build_int_cst (itype, 0));
12606 if (operand_equal_p (arg00, arg11, 0))
12607 return fold_build2_loc (loc, code, type,
12608 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12609 fold_build2_loc (loc,
12610 BIT_XOR_EXPR, itype,
12613 build_int_cst (itype, 0));
12615 if (operand_equal_p (arg00, arg10, 0))
12616 return fold_build2_loc (loc, code, type,
12617 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12618 fold_build2_loc (loc,
12619 BIT_XOR_EXPR, itype,
12622 build_int_cst (itype, 0));
12625 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12626 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12628 tree arg00 = TREE_OPERAND (arg0, 0);
12629 tree arg01 = TREE_OPERAND (arg0, 1);
12630 tree arg10 = TREE_OPERAND (arg1, 0);
12631 tree arg11 = TREE_OPERAND (arg1, 1);
12632 tree itype = TREE_TYPE (arg0);
12634 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12635 operand_equal_p guarantees no side-effects so we don't need
12636 to use omit_one_operand on Z. */
12637 if (operand_equal_p (arg01, arg11, 0))
12638 return fold_build2_loc (loc, code, type, arg00,
12639 fold_convert_loc (loc, TREE_TYPE (arg00),
12641 if (operand_equal_p (arg01, arg10, 0))
12642 return fold_build2_loc (loc, code, type, arg00,
12643 fold_convert_loc (loc, TREE_TYPE (arg00),
12645 if (operand_equal_p (arg00, arg11, 0))
12646 return fold_build2_loc (loc, code, type, arg01,
12647 fold_convert_loc (loc, TREE_TYPE (arg01),
12649 if (operand_equal_p (arg00, arg10, 0))
12650 return fold_build2_loc (loc, code, type, arg01,
12651 fold_convert_loc (loc, TREE_TYPE (arg01),
12654 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12655 if (TREE_CODE (arg01) == INTEGER_CST
12656 && TREE_CODE (arg11) == INTEGER_CST)
12658 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12659 fold_convert_loc (loc, itype, arg11));
12660 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12661 return fold_build2_loc (loc, code, type, tem,
12662 fold_convert_loc (loc, itype, arg10));
12666 /* Attempt to simplify equality/inequality comparisons of complex
12667 values. Only lower the comparison if the result is known or
12668 can be simplified to a single scalar comparison. */
12669 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12670 || TREE_CODE (arg0) == COMPLEX_CST)
12671 && (TREE_CODE (arg1) == COMPLEX_EXPR
12672 || TREE_CODE (arg1) == COMPLEX_CST))
12674 tree real0, imag0, real1, imag1;
12677 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12679 real0 = TREE_OPERAND (arg0, 0);
12680 imag0 = TREE_OPERAND (arg0, 1);
12684 real0 = TREE_REALPART (arg0);
12685 imag0 = TREE_IMAGPART (arg0);
12688 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12690 real1 = TREE_OPERAND (arg1, 0);
12691 imag1 = TREE_OPERAND (arg1, 1);
12695 real1 = TREE_REALPART (arg1);
12696 imag1 = TREE_IMAGPART (arg1);
12699 rcond = fold_binary_loc (loc, code, type, real0, real1);
12700 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12702 if (integer_zerop (rcond))
12704 if (code == EQ_EXPR)
12705 return omit_two_operands_loc (loc, type, boolean_false_node,
12707 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12711 if (code == NE_EXPR)
12712 return omit_two_operands_loc (loc, type, boolean_true_node,
12714 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12718 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12719 if (icond && TREE_CODE (icond) == INTEGER_CST)
12721 if (integer_zerop (icond))
12723 if (code == EQ_EXPR)
12724 return omit_two_operands_loc (loc, type, boolean_false_node,
12726 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12730 if (code == NE_EXPR)
12731 return omit_two_operands_loc (loc, type, boolean_true_node,
12733 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12744 tem = fold_comparison (loc, code, type, op0, op1);
12745 if (tem != NULL_TREE)
12748 /* Transform comparisons of the form X +- C CMP X. */
12749 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12750 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12751 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12752 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12753 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12754 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12756 tree arg01 = TREE_OPERAND (arg0, 1);
12757 enum tree_code code0 = TREE_CODE (arg0);
12760 if (TREE_CODE (arg01) == REAL_CST)
12761 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12763 is_positive = tree_int_cst_sgn (arg01);
12765 /* (X - c) > X becomes false. */
12766 if (code == GT_EXPR
12767 && ((code0 == MINUS_EXPR && is_positive >= 0)
12768 || (code0 == PLUS_EXPR && is_positive <= 0)))
12770 if (TREE_CODE (arg01) == INTEGER_CST
12771 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12772 fold_overflow_warning (("assuming signed overflow does not "
12773 "occur when assuming that (X - c) > X "
12774 "is always false"),
12775 WARN_STRICT_OVERFLOW_ALL);
12776 return constant_boolean_node (0, type);
12779 /* Likewise (X + c) < X becomes false. */
12780 if (code == LT_EXPR
12781 && ((code0 == PLUS_EXPR && is_positive >= 0)
12782 || (code0 == MINUS_EXPR && is_positive <= 0)))
12784 if (TREE_CODE (arg01) == INTEGER_CST
12785 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12786 fold_overflow_warning (("assuming signed overflow does not "
12787 "occur when assuming that "
12788 "(X + c) < X is always false"),
12789 WARN_STRICT_OVERFLOW_ALL);
12790 return constant_boolean_node (0, type);
12793 /* Convert (X - c) <= X to true. */
12794 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12796 && ((code0 == MINUS_EXPR && is_positive >= 0)
12797 || (code0 == PLUS_EXPR && is_positive <= 0)))
12799 if (TREE_CODE (arg01) == INTEGER_CST
12800 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12801 fold_overflow_warning (("assuming signed overflow does not "
12802 "occur when assuming that "
12803 "(X - c) <= X is always true"),
12804 WARN_STRICT_OVERFLOW_ALL);
12805 return constant_boolean_node (1, type);
12808 /* Convert (X + c) >= X to true. */
12809 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12811 && ((code0 == PLUS_EXPR && is_positive >= 0)
12812 || (code0 == MINUS_EXPR && is_positive <= 0)))
12814 if (TREE_CODE (arg01) == INTEGER_CST
12815 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12816 fold_overflow_warning (("assuming signed overflow does not "
12817 "occur when assuming that "
12818 "(X + c) >= X is always true"),
12819 WARN_STRICT_OVERFLOW_ALL);
12820 return constant_boolean_node (1, type);
12823 if (TREE_CODE (arg01) == INTEGER_CST)
12825 /* Convert X + c > X and X - c < X to true for integers. */
12826 if (code == GT_EXPR
12827 && ((code0 == PLUS_EXPR && is_positive > 0)
12828 || (code0 == MINUS_EXPR && is_positive < 0)))
12830 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12831 fold_overflow_warning (("assuming signed overflow does "
12832 "not occur when assuming that "
12833 "(X + c) > X is always true"),
12834 WARN_STRICT_OVERFLOW_ALL);
12835 return constant_boolean_node (1, type);
12838 if (code == LT_EXPR
12839 && ((code0 == MINUS_EXPR && is_positive > 0)
12840 || (code0 == PLUS_EXPR && is_positive < 0)))
12842 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12843 fold_overflow_warning (("assuming signed overflow does "
12844 "not occur when assuming that "
12845 "(X - c) < X is always true"),
12846 WARN_STRICT_OVERFLOW_ALL);
12847 return constant_boolean_node (1, type);
12850 /* Convert X + c <= X and X - c >= X to false for integers. */
12851 if (code == LE_EXPR
12852 && ((code0 == PLUS_EXPR && is_positive > 0)
12853 || (code0 == MINUS_EXPR && is_positive < 0)))
12855 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12856 fold_overflow_warning (("assuming signed overflow does "
12857 "not occur when assuming that "
12858 "(X + c) <= X is always false"),
12859 WARN_STRICT_OVERFLOW_ALL);
12860 return constant_boolean_node (0, type);
12863 if (code == GE_EXPR
12864 && ((code0 == MINUS_EXPR && is_positive > 0)
12865 || (code0 == PLUS_EXPR && is_positive < 0)))
12867 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12868 fold_overflow_warning (("assuming signed overflow does "
12869 "not occur when assuming that "
12870 "(X - c) >= X is always false"),
12871 WARN_STRICT_OVERFLOW_ALL);
12872 return constant_boolean_node (0, type);
12877 /* Comparisons with the highest or lowest possible integer of
12878 the specified precision will have known values. */
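	/* For example, if arg1 is the maximum value of its type, then
	   X > MAX folds to false, X <= MAX to true, X >= MAX to X == MAX,
	   and X < MAX to X != MAX; the minimum value is handled
	   symmetrically.  */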
12880 tree arg1_type = TREE_TYPE (arg1);
12881 unsigned int width = TYPE_PRECISION (arg1_type);
12883 if (TREE_CODE (arg1) == INTEGER_CST
12884 && width <= 2 * HOST_BITS_PER_WIDE_INT
12885 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12887 HOST_WIDE_INT signed_max_hi;
12888 unsigned HOST_WIDE_INT signed_max_lo;
12889 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12891 if (width <= HOST_BITS_PER_WIDE_INT)
12893 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12898 if (TYPE_UNSIGNED (arg1_type))
12900 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12906 max_lo = signed_max_lo;
12907 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12913 width -= HOST_BITS_PER_WIDE_INT;
12914 signed_max_lo = -1;
12915 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12920 if (TYPE_UNSIGNED (arg1_type))
12922 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12927 max_hi = signed_max_hi;
12928 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12932 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12933 && TREE_INT_CST_LOW (arg1) == max_lo)
12937 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12940 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12943 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12946 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12948 /* The GE_EXPR and LT_EXPR cases above are not normally
12949 reached because of previous transformations. */
12954 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12956 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12960 arg1 = const_binop (PLUS_EXPR, arg1,
12961 build_int_cst (TREE_TYPE (arg1), 1));
12962 return fold_build2_loc (loc, EQ_EXPR, type,
12963 fold_convert_loc (loc,
12964 TREE_TYPE (arg1), arg0),
12967 arg1 = const_binop (PLUS_EXPR, arg1,
12968 build_int_cst (TREE_TYPE (arg1), 1));
12969 return fold_build2_loc (loc, NE_EXPR, type,
12970 fold_convert_loc (loc, TREE_TYPE (arg1),
12976 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == min_hi
12978 && TREE_INT_CST_LOW (arg1) == min_lo)
12982 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12985 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12988 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12991 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12996 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == min_hi
12998 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13002 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13003 return fold_build2_loc (loc, NE_EXPR, type,
13004 fold_convert_loc (loc,
13005 TREE_TYPE (arg1), arg0),
13008 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13009 return fold_build2_loc (loc, EQ_EXPR, type,
13010 fold_convert_loc (loc, TREE_TYPE (arg1),
13017 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13018 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13019 && TYPE_UNSIGNED (arg1_type)
13020 /* We will flip the signedness of the comparison operator
13021 associated with the mode of arg1, so the sign bit is
13022 specified by this mode. Check that arg1 is the signed
13023 max associated with this sign bit. */
13024 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13025 /* signed_type does not work on pointer types. */
13026 && INTEGRAL_TYPE_P (arg1_type))
13028 /* The following case also applies to X < signed_max+1
13029 and X >= signed_max+1 because of previous transformations.  */
13030 if (code == LE_EXPR || code == GT_EXPR)
13033 st = signed_type_for (TREE_TYPE (arg1));
13034 return fold_build2_loc (loc,
13035 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13036 type, fold_convert_loc (loc, st, arg0),
13037 build_int_cst (st, 0));
13043 /* If we are comparing an ABS_EXPR with a constant, we can
13044 convert all the cases into explicit comparisons, but they may
13045 well not be faster than doing the ABS and one comparison.
13046 But ABS (X) <= C is a range comparison, which becomes a subtraction
13047 and a comparison, and is probably faster. */
13048 if (code == LE_EXPR
13049 && TREE_CODE (arg1) == INTEGER_CST
13050 && TREE_CODE (arg0) == ABS_EXPR
13051 && ! TREE_SIDE_EFFECTS (arg0)
13052 && (0 != (tem = negate_expr (arg1)))
13053 && TREE_CODE (tem) == INTEGER_CST
13054 && !TREE_OVERFLOW (tem))
13055 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13056 build2 (GE_EXPR, type,
13057 TREE_OPERAND (arg0, 0), tem),
13058 build2 (LE_EXPR, type,
13059 TREE_OPERAND (arg0, 0), arg1));
13061 /* Convert ABS_EXPR<x> >= 0 to true. */
13062 strict_overflow_p = false;
13063 if (code == GE_EXPR
13064 && (integer_zerop (arg1)
13065 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13066 && real_zerop (arg1)))
13067 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13069 if (strict_overflow_p)
13070 fold_overflow_warning (("assuming signed overflow does not occur "
13071 "when simplifying comparison of "
13072 "absolute value and zero"),
13073 WARN_STRICT_OVERFLOW_CONDITIONAL);
13074 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13077 /* Convert ABS_EXPR<x> < 0 to false. */
13078 strict_overflow_p = false;
13079 if (code == LT_EXPR
13080 && (integer_zerop (arg1) || real_zerop (arg1))
13081 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13083 if (strict_overflow_p)
13084 fold_overflow_warning (("assuming signed overflow does not occur "
13085 "when simplifying comparison of "
13086 "absolute value and zero"),
13087 WARN_STRICT_OVERFLOW_CONDITIONAL);
13088 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13091 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13092 and similarly for >= into !=. */
13093 if ((code == LT_EXPR || code == GE_EXPR)
13094 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13095 && TREE_CODE (arg1) == LSHIFT_EXPR
13096 && integer_onep (TREE_OPERAND (arg1, 0)))
13097 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13098 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13099 TREE_OPERAND (arg1, 1)),
13100 build_int_cst (TREE_TYPE (arg0), 0));
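	  /* For example, with unsigned x, "x < (1 << n)" becomes
	     "(x >> n) == 0" and "x >= (1 << n)" becomes "(x >> n) != 0".  */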
13102 if ((code == LT_EXPR || code == GE_EXPR)
13103 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13104 && CONVERT_EXPR_P (arg1)
13105 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13106 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13108 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13109 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13110 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13111 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13112 build_int_cst (TREE_TYPE (arg0), 0));
13117 case UNORDERED_EXPR:
13125 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13127 t1 = fold_relational_const (code, type, arg0, arg1);
13128 if (t1 != NULL_TREE)
13132 /* If the first operand is NaN, the result is constant. */
13133 if (TREE_CODE (arg0) == REAL_CST
13134 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13135 && (code != LTGT_EXPR || ! flag_trapping_math))
13137 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13138 ? integer_zero_node
13139 : integer_one_node;
13140 return omit_one_operand_loc (loc, type, t1, arg1);
13143 /* If the second operand is NaN, the result is constant. */
13144 if (TREE_CODE (arg1) == REAL_CST
13145 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13146 && (code != LTGT_EXPR || ! flag_trapping_math))
13148 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13149 ? integer_zero_node
13150 : integer_one_node;
13151 return omit_one_operand_loc (loc, type, t1, arg0);
13154 /* Simplify unordered comparison of something with itself. */
13155 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13156 && operand_equal_p (arg0, arg1, 0))
13157 return constant_boolean_node (1, type);
13159 if (code == LTGT_EXPR
13160 && !flag_trapping_math
13161 && operand_equal_p (arg0, arg1, 0))
13162 return constant_boolean_node (0, type);
13164 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13166 tree targ0 = strip_float_extensions (arg0);
13167 tree targ1 = strip_float_extensions (arg1);
13168 tree newtype = TREE_TYPE (targ0);
13170 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13171 newtype = TREE_TYPE (targ1);
13173 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13174 return fold_build2_loc (loc, code, type,
13175 fold_convert_loc (loc, newtype, targ0),
13176 fold_convert_loc (loc, newtype, targ1));
13181 case COMPOUND_EXPR:
13182 /* When pedantic, a compound expression can be neither an lvalue
13183 nor an integer constant expression. */
13184 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13186 /* Don't let (0, 0) be null pointer constant. */
13187 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13188 : fold_convert_loc (loc, type, arg1);
13189 return pedantic_non_lvalue_loc (loc, tem);
13192 if ((TREE_CODE (arg0) == REAL_CST
13193 && TREE_CODE (arg1) == REAL_CST)
13194 || (TREE_CODE (arg0) == INTEGER_CST
13195 && TREE_CODE (arg1) == INTEGER_CST))
13196 return build_complex (type, arg0, arg1);
13197 if (TREE_CODE (arg0) == REALPART_EXPR
13198 && TREE_CODE (arg1) == IMAGPART_EXPR
13199 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 0)))
13200 == TYPE_MAIN_VARIANT (type))
13201 && operand_equal_p (TREE_OPERAND (arg0, 0),
13202 TREE_OPERAND (arg1, 0), 0))
13203 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13204 TREE_OPERAND (arg1, 0));
13208 /* An ASSERT_EXPR should never be passed to fold_binary. */
13209 gcc_unreachable ();
13213 } /* switch (code) */
13216 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13217 a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees of GOTO_EXPR.  */
13221 static tree contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13223 switch (TREE_CODE (*tp))
13229 *walk_subtrees = 0;
13231 /* ... fall through ... */
13238 /* Return whether the sub-tree ST contains a label which is accessible from
13239 outside the sub-tree. */
13242 static bool contains_label_p (tree st)
13245 return (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13248 /* Fold a ternary expression of code CODE and type TYPE with operands
13249 OP0, OP1, and OP2. Return the folded expression if folding is
13250 successful. Otherwise, return NULL_TREE. */
13253 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13254 tree op0, tree op1, tree op2)
13257 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13258 enum tree_code_class kind = TREE_CODE_CLASS (code);
13260 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13261 && TREE_CODE_LENGTH (code) == 3);
13263 /* Strip any conversions that don't change the mode. This is safe
13264 for every expression, except for a comparison expression because
13265 its signedness is derived from its operands. So, in the latter
13266 case, only strip conversions that don't change the signedness.
13268 Note that this is done as an internal manipulation within the
13269 constant folder, in order to find the simplest representation of
13270 the arguments so that their form can be studied. In any cases,
13271 the appropriate type conversions should be put back in the tree
13272 that will get out of the constant folder. */
13293 case COMPONENT_REF:
13294 if (TREE_CODE (arg0) == CONSTRUCTOR
13295 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13297 unsigned HOST_WIDE_INT idx;
13299 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13306 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13307 so all simple results must be passed through pedantic_non_lvalue. */
13308 if (TREE_CODE (arg0) == INTEGER_CST)
13310 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13311 tem = integer_zerop (arg0) ? op2 : op1;
13312 /* Only optimize constant conditions when the selected branch
13313 has the same type as the COND_EXPR. This avoids optimizing
13314 away "c ? x : throw", where the throw has a void type.
13315 Avoid throwing away the operand that contains a label.  */
13316 if ((!TREE_SIDE_EFFECTS (unused_op)
13317 || !contains_label_p (unused_op))
13318 && (! VOID_TYPE_P (TREE_TYPE (tem))
13319 || VOID_TYPE_P (type)))
13320 return pedantic_non_lvalue_loc (loc, tem);
13323 if (operand_equal_p (arg1, op2, 0))
13324 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13326 /* If we have A op B ? A : C, we may be able to convert this to a
13327 simpler expression, depending on the operation and the values
13328 of B and C. Signed zeros prevent all of these transformations,
13329 for reasons given above each one.
13331 Also try swapping the arguments and inverting the conditional. */
13332 if (COMPARISON_CLASS_P (arg0)
13333 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13334 arg1, TREE_OPERAND (arg0, 1))
13335 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13337 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13342 if (COMPARISON_CLASS_P (arg0)
13343 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13345 TREE_OPERAND (arg0, 1))
13346 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13348 location_t loc0 = EXPR_LOCATION (arg0);
13349 if (loc0 == UNKNOWN_LOCATION)
13351 tem = fold_truth_not_expr (loc0, arg0);
13352 if (tem && COMPARISON_CLASS_P (tem))
13354 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13360 /* If the second operand is simpler than the third, swap them
13361 since that produces better jump optimization results. */
13362 if (truth_value_p (TREE_CODE (arg0))
13363 && tree_swap_operands_p (op1, op2, false))
13365 location_t loc0 = EXPR_LOCATION (arg0);
13366 if (loc0 == UNKNOWN_LOCATION)
13368 /* See if this can be inverted. If it can't, possibly because
13369 it was a floating-point inequality comparison, don't do anything.  */
13371 tem = fold_truth_not_expr (loc0, arg0);
13373 return fold_build3_loc (loc, code, type, tem, op2, op1);
13376 /* Convert A ? 1 : 0 to simply A. */
13377 if (integer_onep (op1)
13378 && integer_zerop (op2)
13379 /* If we try to convert OP0 to our type, the
13380 call to fold will try to move the conversion inside
13381 a COND, which will recurse. In that case, the COND_EXPR
13382 is probably the best choice, so leave it alone. */
13383 && type == TREE_TYPE (arg0))
13384 return pedantic_non_lvalue_loc (loc, arg0);
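      /* For example, "(a < b) ? 1 : 0" of the right type folds to just
	 "a < b".  */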
13386 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13387 over COND_EXPR in cases such as floating point comparisons. */
13388 if (integer_zerop (op1)
13389 && integer_onep (op2)
13390 && truth_value_p (TREE_CODE (arg0)))
13391 return pedantic_non_lvalue_loc (loc,
13392 fold_convert_loc (loc, type,
13393 invert_truthvalue_loc (loc,
13396 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13397 if (TREE_CODE (arg0) == LT_EXPR
13398 && integer_zerop (TREE_OPERAND (arg0, 1))
13399 && integer_zerop (op2)
13400 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13402 /* sign_bit_p only checks ARG1 bits within A's precision.
13403 If <sign bit of A> has wider type than A, bits outside
13404 of A's precision in <sign bit of A> need to be checked.
13405 If they are all 0, this optimization needs to be done
13406 in unsigned A's type, if they are all 1 in signed A's type,
13407 otherwise this can't be done. */
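/* A worked illustration (hypothetical widths): if A is a 16-bit value
   and <sign bit of A> is the 32-bit constant 0x8000, the bits above
   A's precision are all zero, so the AND below is done in unsigned
   A's type; had the constant been 0xffff8000, those bits would all be
   one and the signed type would be used instead.  */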
13408 if (TYPE_PRECISION (TREE_TYPE (tem))
13409 < TYPE_PRECISION (TREE_TYPE (arg1))
13410 && TYPE_PRECISION (TREE_TYPE (tem))
13411 < TYPE_PRECISION (type))
13413 unsigned HOST_WIDE_INT mask_lo;
13414 HOST_WIDE_INT mask_hi;
13415 int inner_width, outer_width;
13418 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13419 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13420 if (outer_width > TYPE_PRECISION (type))
13421 outer_width = TYPE_PRECISION (type);
13423 if (outer_width > HOST_BITS_PER_WIDE_INT)
13425 mask_hi = ((unsigned HOST_WIDE_INT) -1
13426 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13432 mask_lo = ((unsigned HOST_WIDE_INT) -1
13433 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13435 if (inner_width > HOST_BITS_PER_WIDE_INT)
13437 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13438 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13442 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13443 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13445 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13446 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13448 tem_type = signed_type_for (TREE_TYPE (tem));
13449 tem = fold_convert_loc (loc, tem_type, tem);
13451 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13452 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13454 tem_type = unsigned_type_for (TREE_TYPE (tem));
13455 tem = fold_convert_loc (loc, tem_type, tem);
13463 fold_convert_loc (loc, type,
13464 fold_build2_loc (loc, BIT_AND_EXPR,
13465 TREE_TYPE (tem), tem,
13466 fold_convert_loc (loc, TREE_TYPE (tem), arg1)));
13471 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13472 already handled above. */
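/* Concretely (with a made-up N): "((a >> 3) & 1) ? 8 : 0" becomes
   "a & 8", since 8 is 1 << 3 and tree_log2 of the constant matches
   the shift count checked below.  */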
13473 if (TREE_CODE (arg0) == BIT_AND_EXPR
13474 && integer_onep (TREE_OPERAND (arg0, 1))
13475 && integer_zerop (op2)
13476 && integer_pow2p (arg1))
13478 tree tem = TREE_OPERAND (arg0, 0);
13480 if (TREE_CODE (tem) == RSHIFT_EXPR
13481 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13482 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13483 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13484 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13485 TREE_OPERAND (tem, 0), arg1);
13488 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13489 is probably obsolete because the first operand should be a
13490 truth value (that's why we have the two cases above), but let's
13491 leave it in until we can confirm this for all front-ends. */
13492 if (integer_zerop (op2)
13493 && TREE_CODE (arg0) == NE_EXPR
13494 && integer_zerop (TREE_OPERAND (arg0, 1))
13495 && integer_pow2p (arg1)
13496 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13497 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13498 arg1, OEP_ONLY_CONST))
13499 return pedantic_non_lvalue_loc (loc,
13500 fold_convert_loc (loc, type,
13501 TREE_OPERAND (arg0, 0)));
13503 /* Convert A ? B : 0 into A && B if A and B are truth values. */
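/* E.g. (illustrative only): with boolean-valued a and b,
   "a ? b : 0" becomes "a && b"; the symmetric forms with 1 and with
   the branches swapped are handled just below.  */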
13504 if (integer_zerop (op2)
13505 && truth_value_p (TREE_CODE (arg0))
13506 && truth_value_p (TREE_CODE (arg1)))
13507 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13508 fold_convert_loc (loc, type, arg0),
13511 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13512 if (integer_onep (op2)
13513 && truth_value_p (TREE_CODE (arg0))
13514 && truth_value_p (TREE_CODE (arg1)))
13516 location_t loc0 = EXPR_LOCATION (arg0);
13517 if (loc0 == UNKNOWN_LOCATION)
13519 /* Only perform transformation if ARG0 is easily inverted. */
13520 tem = fold_truth_not_expr (loc0, arg0);
13522 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13523 fold_convert_loc (loc, type, tem),
13527 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13528 if (integer_zerop (arg1)
13529 && truth_value_p (TREE_CODE (arg0))
13530 && truth_value_p (TREE_CODE (op2)))
13532 location_t loc0 = EXPR_LOCATION (arg0);
13533 if (loc0 == UNKNOWN_LOCATION)
13535 /* Only perform transformation if ARG0 is easily inverted. */
13536 tem = fold_truth_not_expr (loc0, arg0);
13538 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13539 fold_convert_loc (loc, type, tem),
13543 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13544 if (integer_onep (arg1)
13545 && truth_value_p (TREE_CODE (arg0))
13546 && truth_value_p (TREE_CODE (op2)))
13547 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13548 fold_convert_loc (loc, type, arg0),
13554 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13555 of fold_ternary on them. */
13556 gcc_unreachable ();
13558 case BIT_FIELD_REF:
13559 if ((TREE_CODE (arg0) == VECTOR_CST
13560 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13561 && type == TREE_TYPE (TREE_TYPE (arg0)))
13563 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13564 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13567 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13568 && (idx % width) == 0
13569 && (idx = idx / width)
13570 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13572 tree elements = NULL_TREE;
13574 if (TREE_CODE (arg0) == VECTOR_CST)
13575 elements = TREE_VECTOR_CST_ELTS (arg0);
13578 unsigned HOST_WIDE_INT idx;
13581 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13582 elements = tree_cons (NULL_TREE, value, elements);
13584 while (idx-- > 0 && elements)
13585 elements = TREE_CHAIN (elements);
13587 return TREE_VALUE (elements);
13589 return build_zero_cst (type);
13593 /* A bit-field-ref that referenced the full argument can be stripped. */
13594 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13595 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13596 && integer_zerop (op2))
13597 return fold_convert_loc (loc, type, arg0);
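/* Two illustrative reductions for this case (hypothetical operands):
   extracting element 1 of the constant vector {1,2,3,4} with a
   BIT_FIELD_REF of element width yields the constant 2, and a
   BIT_FIELD_REF that covers all of a 32-bit integer at offset 0 is
   just a conversion of the integer itself.  */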
13602 /* For integers we can decompose the FMA if possible. */
13603 if (TREE_CODE (arg0) == INTEGER_CST
13604 && TREE_CODE (arg1) == INTEGER_CST)
13605 return fold_build2_loc (loc, PLUS_EXPR, type,
13606 const_binop (MULT_EXPR, arg0, arg1), arg2);
13607 if (integer_zerop (arg2))
13608 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13610 return fold_fma (loc, type, arg0, arg1, arg2);
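/* For example (made-up constants): FMA_EXPR <2, 3, z> decomposes to
   "6 + z" via const_binop above, and FMA_EXPR <x, y, 0> becomes
   "x * y"; anything else is left to fold_fma.  */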
13614 } /* switch (code) */
13617 /* Perform constant folding and related simplification of EXPR.
13618 The related simplifications include x*1 => x, x*0 => 0, etc.,
13619 and application of the associative law.
13620 NOP_EXPR conversions may be removed freely (as long as we
13621 are careful not to change the type of the overall expression).
13622 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13623 but we can constant-fold them if they have constant operands. */
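/* A minimal caller-side sketch (hypothetical trees):

     tree sum = build2 (PLUS_EXPR, integer_type_node,
			x, integer_zero_node);
     sum = fold (sum);

   which would typically hand back just "x"; fold returns its argument
   unchanged when no simplification applies.  */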
13625 #ifdef ENABLE_FOLD_CHECKING
13626 # define fold(x) fold_1 (x)
13627 static tree fold_1 (tree);
13633 const tree t = expr;
13634 enum tree_code code = TREE_CODE (t);
13635 enum tree_code_class kind = TREE_CODE_CLASS (code);
13637 location_t loc = EXPR_LOCATION (expr);
13639 /* Return right away if a constant. */
13640 if (kind == tcc_constant)
13643 /* CALL_EXPR-like objects with variable numbers of operands are
13644 treated specially. */
13645 if (kind == tcc_vl_exp)
13647 if (code == CALL_EXPR)
13649 tem = fold_call_expr (loc, expr, false);
13650 return tem ? tem : expr;
13655 if (IS_EXPR_CODE_CLASS (kind))
13657 tree type = TREE_TYPE (t);
13658 tree op0, op1, op2;
13660 switch (TREE_CODE_LENGTH (code))
13663 op0 = TREE_OPERAND (t, 0);
13664 tem = fold_unary_loc (loc, code, type, op0);
13665 return tem ? tem : expr;
13667 op0 = TREE_OPERAND (t, 0);
13668 op1 = TREE_OPERAND (t, 1);
13669 tem = fold_binary_loc (loc, code, type, op0, op1);
13670 return tem ? tem : expr;
13672 op0 = TREE_OPERAND (t, 0);
13673 op1 = TREE_OPERAND (t, 1);
13674 op2 = TREE_OPERAND (t, 2);
13675 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13676 return tem ? tem : expr;
13686 tree op0 = TREE_OPERAND (t, 0);
13687 tree op1 = TREE_OPERAND (t, 1);
13689 if (TREE_CODE (op1) == INTEGER_CST
13690 && TREE_CODE (op0) == CONSTRUCTOR
13691 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13693 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13694 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13695 unsigned HOST_WIDE_INT begin = 0;
13697 /* Find a matching index by means of a binary search. */
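/* For instance (a hypothetical constructor): given elements with
   indexes {[0], [2..5], [9]} and op1 == 3, the search narrows onto
   the RANGE_EXPR entry [2..5], whose value is returned below.  */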
13698 while (begin != end)
13700 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13701 tree index = VEC_index (constructor_elt, elts, middle)->index;
13703 if (TREE_CODE (index) == INTEGER_CST
13704 && tree_int_cst_lt (index, op1))
13705 begin = middle + 1;
13706 else if (TREE_CODE (index) == INTEGER_CST
13707 && tree_int_cst_lt (op1, index))
13709 else if (TREE_CODE (index) == RANGE_EXPR
13710 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13711 begin = middle + 1;
13712 else if (TREE_CODE (index) == RANGE_EXPR
13713 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13716 return VEC_index (constructor_elt, elts, middle)->value;
13724 return fold (DECL_INITIAL (t));
13728 } /* switch (code) */
13731 #ifdef ENABLE_FOLD_CHECKING
13734 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13735 static void fold_check_failed (const_tree, const_tree);
13736 void print_fold_checksum (const_tree);
13738 /* When --enable-checking=fold is in effect, compute a digest of EXPR
13739 before and after the actual call to fold, to verify that fold did not
13740 accidentally change the original EXPR. */
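/* Roughly speaking: the tree reachable from EXPR is hashed into an
   MD5 digest, fold_1 is run, the digest is recomputed, and any
   mismatch triggers the "original tree changed by fold"
   internal_error below.  This is only compiled in when
   ENABLE_FOLD_CHECKING is defined.  */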
13746 struct md5_ctx ctx;
13747 unsigned char checksum_before[16], checksum_after[16];
13750 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13751 md5_init_ctx (&ctx);
13752 fold_checksum_tree (expr, &ctx, ht);
13753 md5_finish_ctx (&ctx, checksum_before);
13756 ret = fold_1 (expr);
13758 md5_init_ctx (&ctx);
13759 fold_checksum_tree (expr, &ctx, ht);
13760 md5_finish_ctx (&ctx, checksum_after);
13763 if (memcmp (checksum_before, checksum_after, 16))
13764 fold_check_failed (expr, ret);
13770 print_fold_checksum (const_tree expr)
13772 struct md5_ctx ctx;
13773 unsigned char checksum[16], cnt;
13776 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13777 md5_init_ctx (&ctx);
13778 fold_checksum_tree (expr, &ctx, ht);
13779 md5_finish_ctx (&ctx, checksum);
13781 for (cnt = 0; cnt < 16; ++cnt)
13782 fprintf (stderr, "%02x", checksum[cnt]);
13783 putc ('\n', stderr);
13787 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13789 internal_error ("fold check: original tree changed by fold");
13793 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13796 enum tree_code code;
13797 union tree_node buf;
13802 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13803 <= sizeof (struct tree_function_decl))
13804 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13807 slot = (void **) htab_find_slot (ht, expr, INSERT);
13810 *slot = CONST_CAST_TREE (expr);
13811 code = TREE_CODE (expr);
13812 if (TREE_CODE_CLASS (code) == tcc_declaration
13813 && DECL_ASSEMBLER_NAME_SET_P (expr))
13815 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13816 memcpy ((char *) &buf, expr, tree_size (expr));
13817 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13818 expr = (tree) &buf;
13820 else if (TREE_CODE_CLASS (code) == tcc_type
13821 && (TYPE_POINTER_TO (expr)
13822 || TYPE_REFERENCE_TO (expr)
13823 || TYPE_CACHED_VALUES_P (expr)
13824 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13825 || TYPE_NEXT_VARIANT (expr)))
13827 /* Allow these fields to be modified. */
13829 memcpy ((char *) &buf, expr, tree_size (expr));
13830 expr = tmp = (tree) &buf;
13831 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13832 TYPE_POINTER_TO (tmp) = NULL;
13833 TYPE_REFERENCE_TO (tmp) = NULL;
13834 TYPE_NEXT_VARIANT (tmp) = NULL;
13835 if (TYPE_CACHED_VALUES_P (tmp))
13837 TYPE_CACHED_VALUES_P (tmp) = 0;
13838 TYPE_CACHED_VALUES (tmp) = NULL;
13841 md5_process_bytes (expr, tree_size (expr), ctx);
13842 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13843 if (TREE_CODE_CLASS (code) != tcc_type
13844 && TREE_CODE_CLASS (code) != tcc_declaration
13845 && code != TREE_LIST
13846 && code != SSA_NAME)
13847 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13848 switch (TREE_CODE_CLASS (code))
13854 md5_process_bytes (TREE_STRING_POINTER (expr),
13855 TREE_STRING_LENGTH (expr), ctx);
13858 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13859 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13862 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13868 case tcc_exceptional:
13872 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13873 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13874 expr = TREE_CHAIN (expr);
13875 goto recursive_label;
13878 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13879 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13885 case tcc_expression:
13886 case tcc_reference:
13887 case tcc_comparison:
13890 case tcc_statement:
13892 len = TREE_OPERAND_LENGTH (expr);
13893 for (i = 0; i < len; ++i)
13894 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13896 case tcc_declaration:
13897 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13898 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13899 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13901 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13902 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13903 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13904 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13905 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13907 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13908 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13910 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13912 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13913 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13914 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13918 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13919 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13920 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13921 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13922 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13923 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13924 if (INTEGRAL_TYPE_P (expr)
13925 || SCALAR_FLOAT_TYPE_P (expr))
13927 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13928 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13930 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13931 if (TREE_CODE (expr) == RECORD_TYPE
13932 || TREE_CODE (expr) == UNION_TYPE
13933 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13934 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13935 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13942 /* Helper function for outputting the checksum of a tree T. When
13943 debugging with gdb, you can "define mynext" to be "next" followed
13944 by "call debug_fold_checksum (op0)", then just trace down till the outputs differ.  */
13947 DEBUG_FUNCTION void
13948 debug_fold_checksum (const_tree t)
13951 unsigned char checksum[16];
13952 struct md5_ctx ctx;
13953 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13955 md5_init_ctx (&ctx);
13956 fold_checksum_tree (t, &ctx, ht);
13957 md5_finish_ctx (&ctx, checksum);
13960 for (i = 0; i < 16; i++)
13961 fprintf (stderr, "%d ", checksum[i]);
13963 fprintf (stderr, "\n");
13968 /* Fold a unary tree expression with code CODE of type TYPE with an
13969 operand OP0. LOC is the location of the resulting expression.
13970 Return a folded expression if successful. Otherwise, return a tree
13971 expression with code CODE of type TYPE with an operand OP0. */
13974 fold_build1_stat_loc (location_t loc,
13975 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13978 #ifdef ENABLE_FOLD_CHECKING
13979 unsigned char checksum_before[16], checksum_after[16];
13980 struct md5_ctx ctx;
13983 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13984 md5_init_ctx (&ctx);
13985 fold_checksum_tree (op0, &ctx, ht);
13986 md5_finish_ctx (&ctx, checksum_before);
13990 tem = fold_unary_loc (loc, code, type, op0);
13992 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
13994 #ifdef ENABLE_FOLD_CHECKING
13995 md5_init_ctx (&ctx);
13996 fold_checksum_tree (op0, &ctx, ht);
13997 md5_finish_ctx (&ctx, checksum_after);
14000 if (memcmp (checksum_before, checksum_after, 16))
14001 fold_check_failed (op0, tem);
14006 /* Fold a binary tree expression with code CODE of type TYPE with
14007 operands OP0 and OP1. LOC is the location of the resulting
14008 expression. Return a folded expression if successful. Otherwise,
14009 return a tree expression with code CODE of type TYPE with operands
14013 fold_build2_stat_loc (location_t loc,
14014 enum tree_code code, tree type, tree op0, tree op1
14018 #ifdef ENABLE_FOLD_CHECKING
14019 unsigned char checksum_before_op0[16],
14020 checksum_before_op1[16],
14021 checksum_after_op0[16],
14022 checksum_after_op1[16];
14023 struct md5_ctx ctx;
14026 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14027 md5_init_ctx (&ctx);
14028 fold_checksum_tree (op0, &ctx, ht);
14029 md5_finish_ctx (&ctx, checksum_before_op0);
14032 md5_init_ctx (&ctx);
14033 fold_checksum_tree (op1, &ctx, ht);
14034 md5_finish_ctx (&ctx, checksum_before_op1);
14038 tem = fold_binary_loc (loc, code, type, op0, op1);
14040 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14042 #ifdef ENABLE_FOLD_CHECKING
14043 md5_init_ctx (&ctx);
14044 fold_checksum_tree (op0, &ctx, ht);
14045 md5_finish_ctx (&ctx, checksum_after_op0);
14048 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14049 fold_check_failed (op0, tem);
14051 md5_init_ctx (&ctx);
14052 fold_checksum_tree (op1, &ctx, ht);
14053 md5_finish_ctx (&ctx, checksum_after_op1);
14056 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14057 fold_check_failed (op1, tem);
14062 /* Fold a ternary tree expression with code CODE of type TYPE with
14063 operands OP0, OP1, and OP2. Return a folded expression if
14064 successful. Otherwise, return a tree expression with code CODE of
14065 type TYPE with operands OP0, OP1, and OP2. */
14068 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14069 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14072 #ifdef ENABLE_FOLD_CHECKING
14073 unsigned char checksum_before_op0[16],
14074 checksum_before_op1[16],
14075 checksum_before_op2[16],
14076 checksum_after_op0[16],
14077 checksum_after_op1[16],
14078 checksum_after_op2[16];
14079 struct md5_ctx ctx;
14082 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14083 md5_init_ctx (&ctx);
14084 fold_checksum_tree (op0, &ctx, ht);
14085 md5_finish_ctx (&ctx, checksum_before_op0);
14088 md5_init_ctx (&ctx);
14089 fold_checksum_tree (op1, &ctx, ht);
14090 md5_finish_ctx (&ctx, checksum_before_op1);
14093 md5_init_ctx (&ctx);
14094 fold_checksum_tree (op2, &ctx, ht);
14095 md5_finish_ctx (&ctx, checksum_before_op2);
14099 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14100 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14102 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14104 #ifdef ENABLE_FOLD_CHECKING
14105 md5_init_ctx (&ctx);
14106 fold_checksum_tree (op0, &ctx, ht);
14107 md5_finish_ctx (&ctx, checksum_after_op0);
14110 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14111 fold_check_failed (op0, tem);
14113 md5_init_ctx (&ctx);
14114 fold_checksum_tree (op1, &ctx, ht);
14115 md5_finish_ctx (&ctx, checksum_after_op1);
14118 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14119 fold_check_failed (op1, tem);
14121 md5_init_ctx (&ctx);
14122 fold_checksum_tree (op2, &ctx, ht);
14123 md5_finish_ctx (&ctx, checksum_after_op2);
14126 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14127 fold_check_failed (op2, tem);
14132 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14133 arguments in ARGARRAY, and a null static chain.
14134 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14135 of type TYPE from the given operands as constructed by build_call_array. */
14138 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14139 int nargs, tree *argarray)
14142 #ifdef ENABLE_FOLD_CHECKING
14143 unsigned char checksum_before_fn[16],
14144 checksum_before_arglist[16],
14145 checksum_after_fn[16],
14146 checksum_after_arglist[16];
14147 struct md5_ctx ctx;
14151 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14152 md5_init_ctx (&ctx);
14153 fold_checksum_tree (fn, &ctx, ht);
14154 md5_finish_ctx (&ctx, checksum_before_fn);
14157 md5_init_ctx (&ctx);
14158 for (i = 0; i < nargs; i++)
14159 fold_checksum_tree (argarray[i], &ctx, ht);
14160 md5_finish_ctx (&ctx, checksum_before_arglist);
14164 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14166 #ifdef ENABLE_FOLD_CHECKING
14167 md5_init_ctx (&ctx);
14168 fold_checksum_tree (fn, &ctx, ht);
14169 md5_finish_ctx (&ctx, checksum_after_fn);
14172 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14173 fold_check_failed (fn, tem);
14175 md5_init_ctx (&ctx);
14176 for (i = 0; i < nargs; i++)
14177 fold_checksum_tree (argarray[i], &ctx, ht);
14178 md5_finish_ctx (&ctx, checksum_after_arglist);
14181 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14182 fold_check_failed (NULL_TREE, tem);
14187 /* Perform constant folding and related simplification of initializer
14188 expression EXPR. These behave identically to "fold_buildN" but ignore
14189 potential run-time traps and exceptions that fold must preserve. */
14191 #define START_FOLD_INIT \
14192 int saved_signaling_nans = flag_signaling_nans;\
14193 int saved_trapping_math = flag_trapping_math;\
14194 int saved_rounding_math = flag_rounding_math;\
14195 int saved_trapv = flag_trapv;\
14196 int saved_folding_initializer = folding_initializer;\
14197 flag_signaling_nans = 0;\
14198 flag_trapping_math = 0;\
14199 flag_rounding_math = 0; flag_trapv = 0;\
14201 folding_initializer = 1;
14203 #define END_FOLD_INIT \
14204 flag_signaling_nans = saved_signaling_nans;\
14205 flag_trapping_math = saved_trapping_math;\
14206 flag_rounding_math = saved_rounding_math;\
14207 flag_trapv = saved_trapv;\
14208 folding_initializer = saved_folding_initializer;
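/* Sketch of the effect (hypothetical flags): with -ftrapv or
   -ftrapping-math in force, fold would normally refuse to fold away a
   potentially trapping operation; inside these wrappers the flags are
   temporarily cleared, so an initializer such as a constant overflow
   or an inexact FP operation can still be reduced, and the flags are
   restored afterwards.  */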
14211 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14212 tree type, tree op)
14217 result = fold_build1_loc (loc, code, type, op);
14224 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14225 tree type, tree op0, tree op1)
14230 result = fold_build2_loc (loc, code, type, op0, op1);
14237 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14238 tree type, tree op0, tree op1, tree op2)
14243 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14250 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14251 int nargs, tree *argarray)
14256 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14262 #undef START_FOLD_INIT
14263 #undef END_FOLD_INIT
14265 /* Determine if first argument is a multiple of second argument. Return 0 if
14266 it is not, or we cannot easily determine it to be.
14268 An example of the sort of thing we care about (at this point; this routine
14269 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14270 fold cases do now) is discovering that
14272 SAVE_EXPR (I) * SAVE_EXPR (J * 8) is a multiple of SAVE_EXPR (J * 8)
14278 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14280 This code also handles discovering that
14282 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14284 is a multiple of 8 so we don't have to worry about dealing with a
14285 possible remainder.
14287 Note that we *look* inside a SAVE_EXPR only to determine how it was
14288 calculated; it is not safe for fold to do much of anything else with the
14289 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14290 at run time. For example, the latter example above *cannot* be implemented
14291 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14292 evaluation time of the original SAVE_EXPR is not necessarily the same at
14293 the time the new expression is evaluated. The only optimization of this
14294 sort that would be valid is changing
14296 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8) divided by 8 to
14300 SAVE_EXPR (I) * SAVE_EXPR (J)
14302 (where the same SAVE_EXPR (J) is used in the original and the
14303 transformed version). */
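/* A couple of concrete queries (hypothetical trees):
   multiple_of_p (type, "j * 8", "4") is true because the constant
   factor 8 is a multiple of 4, while multiple_of_p (type, "i + 4",
   "4") is only true when "i" itself can be shown to be a multiple
   of 4.  */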
14306 multiple_of_p (tree type, const_tree top, const_tree bottom)
14308 if (operand_equal_p (top, bottom, 0))
14311 if (TREE_CODE (type) != INTEGER_TYPE)
14314 switch (TREE_CODE (top))
14317 /* Bitwise and provides a power of two multiple. If the mask is
14318 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14319 if (!integer_pow2p (bottom))
14324 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14325 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14329 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14330 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14333 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14337 op1 = TREE_OPERAND (top, 1);
14338 /* const_binop may not detect overflow correctly,
14339 so check for it explicitly here. */
14340 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14341 > TREE_INT_CST_LOW (op1)
14342 && TREE_INT_CST_HIGH (op1) == 0
14343 && 0 != (t1 = fold_convert (type,
14344 const_binop (LSHIFT_EXPR,
14347 && !TREE_OVERFLOW (t1))
14348 return multiple_of_p (type, t1, bottom);
14353 /* Can't handle conversions from non-integral or wider integral type. */
14354 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14355 || (TYPE_PRECISION (type)
14356 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14359 /* .. fall through ... */
14362 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14365 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14366 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14369 if (TREE_CODE (bottom) != INTEGER_CST
14370 || integer_zerop (bottom)
14371 || (TYPE_UNSIGNED (type)
14372 && (tree_int_cst_sgn (top) < 0
14373 || tree_int_cst_sgn (bottom) < 0)))
14375 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14383 /* Return true if CODE or TYPE is known to be non-negative. */
14386 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14388 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14389 && truth_value_p (code))
14390 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14391 have a signed:1 type (where the value is -1 and 0). */
14396 /* Return true if (CODE OP0) is known to be non-negative. If the return
14397 value is based on the assumption that signed overflow is undefined,
14398 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14399 *STRICT_OVERFLOW_P. */
14402 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14403 bool *strict_overflow_p)
14405 if (TYPE_UNSIGNED (type))
14411 /* We can't return 1 if flag_wrapv is set because
14412 ABS_EXPR<INT_MIN> = INT_MIN. */
14413 if (!INTEGRAL_TYPE_P (type))
14415 if (TYPE_OVERFLOW_UNDEFINED (type))
14417 *strict_overflow_p = true;
14422 case NON_LVALUE_EXPR:
14424 case FIX_TRUNC_EXPR:
14425 return tree_expr_nonnegative_warnv_p (op0,
14426 strict_overflow_p);
14430 tree inner_type = TREE_TYPE (op0);
14431 tree outer_type = type;
14433 if (TREE_CODE (outer_type) == REAL_TYPE)
14435 if (TREE_CODE (inner_type) == REAL_TYPE)
14436 return tree_expr_nonnegative_warnv_p (op0,
14437 strict_overflow_p);
14438 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14440 if (TYPE_UNSIGNED (inner_type))
14442 return tree_expr_nonnegative_warnv_p (op0,
14443 strict_overflow_p);
14446 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14448 if (TREE_CODE (inner_type) == REAL_TYPE)
14449 return tree_expr_nonnegative_warnv_p (op0,
14450 strict_overflow_p);
14451 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14452 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14453 && TYPE_UNSIGNED (inner_type);
14459 return tree_simple_nonnegative_warnv_p (code, type);
14462 /* We don't know sign of `t', so be conservative and return false. */
14466 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14467 value is based on the assumption that signed overflow is undefined,
14468 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14469 *STRICT_OVERFLOW_P. */
14472 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14473 tree op1, bool *strict_overflow_p)
14475 if (TYPE_UNSIGNED (type))
14480 case POINTER_PLUS_EXPR:
14482 if (FLOAT_TYPE_P (type))
14483 return (tree_expr_nonnegative_warnv_p (op0,
14485 && tree_expr_nonnegative_warnv_p (op1,
14486 strict_overflow_p));
14488 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14489 both unsigned and at least 2 bits shorter than the result. */
14490 if (TREE_CODE (type) == INTEGER_TYPE
14491 && TREE_CODE (op0) == NOP_EXPR
14492 && TREE_CODE (op1) == NOP_EXPR)
14494 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14495 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14496 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14497 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14499 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14500 TYPE_PRECISION (inner2)) + 1;
14501 return prec < TYPE_PRECISION (type);
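/* Numeric illustration (assumed widths): adding two zero-extended
   8-bit values inside a 32-bit signed type gives at most a 9-bit
   result, and 9 < 32, so the sum cannot become negative.  */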
14507 if (FLOAT_TYPE_P (type))
14509 /* x * x for floating point x is always non-negative. */
14510 if (operand_equal_p (op0, op1, 0))
14512 return (tree_expr_nonnegative_warnv_p (op0,
14514 && tree_expr_nonnegative_warnv_p (op1,
14515 strict_overflow_p));
14518 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14519 both unsigned and their total bits is shorter than the result. */
14520 if (TREE_CODE (type) == INTEGER_TYPE
14521 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14522 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14524 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14525 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14527 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14528 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14531 bool unsigned0 = TYPE_UNSIGNED (inner0);
14532 bool unsigned1 = TYPE_UNSIGNED (inner1);
14534 if (TREE_CODE (op0) == INTEGER_CST)
14535 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14537 if (TREE_CODE (op1) == INTEGER_CST)
14538 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14540 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14541 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14543 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14544 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14545 : TYPE_PRECISION (inner0);
14547 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14548 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14549 : TYPE_PRECISION (inner1);
14551 return precision0 + precision1 < TYPE_PRECISION (type);
14558 return (tree_expr_nonnegative_warnv_p (op0,
14560 || tree_expr_nonnegative_warnv_p (op1,
14561 strict_overflow_p));
14567 case TRUNC_DIV_EXPR:
14568 case CEIL_DIV_EXPR:
14569 case FLOOR_DIV_EXPR:
14570 case ROUND_DIV_EXPR:
14571 return (tree_expr_nonnegative_warnv_p (op0,
14573 && tree_expr_nonnegative_warnv_p (op1,
14574 strict_overflow_p));
14576 case TRUNC_MOD_EXPR:
14577 case CEIL_MOD_EXPR:
14578 case FLOOR_MOD_EXPR:
14579 case ROUND_MOD_EXPR:
14580 return tree_expr_nonnegative_warnv_p (op0,
14581 strict_overflow_p);
14583 return tree_simple_nonnegative_warnv_p (code, type);
14586 /* We don't know sign of `t', so be conservative and return false. */
14590 /* Return true if T is known to be non-negative. If the return
14591 value is based on the assumption that signed overflow is undefined,
14592 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14593 *STRICT_OVERFLOW_P. */
14596 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14598 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14601 switch (TREE_CODE (t))
14604 return tree_int_cst_sgn (t) >= 0;
14607 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14610 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14613 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14615 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14616 strict_overflow_p));
14618 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14621 /* We don't know sign of `t', so be conservative and return false. */
14625 /* Return true if T is known to be non-negative. If the return
14626 value is based on the assumption that signed overflow is undefined,
14627 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14628 *STRICT_OVERFLOW_P. */
14631 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14632 tree arg0, tree arg1, bool *strict_overflow_p)
14634 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14635 switch (DECL_FUNCTION_CODE (fndecl))
14637 CASE_FLT_FN (BUILT_IN_ACOS):
14638 CASE_FLT_FN (BUILT_IN_ACOSH):
14639 CASE_FLT_FN (BUILT_IN_CABS):
14640 CASE_FLT_FN (BUILT_IN_COSH):
14641 CASE_FLT_FN (BUILT_IN_ERFC):
14642 CASE_FLT_FN (BUILT_IN_EXP):
14643 CASE_FLT_FN (BUILT_IN_EXP10):
14644 CASE_FLT_FN (BUILT_IN_EXP2):
14645 CASE_FLT_FN (BUILT_IN_FABS):
14646 CASE_FLT_FN (BUILT_IN_FDIM):
14647 CASE_FLT_FN (BUILT_IN_HYPOT):
14648 CASE_FLT_FN (BUILT_IN_POW10):
14649 CASE_INT_FN (BUILT_IN_FFS):
14650 CASE_INT_FN (BUILT_IN_PARITY):
14651 CASE_INT_FN (BUILT_IN_POPCOUNT):
14652 case BUILT_IN_BSWAP32:
14653 case BUILT_IN_BSWAP64:
14657 CASE_FLT_FN (BUILT_IN_SQRT):
14658 /* sqrt(-0.0) is -0.0. */
14659 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14661 return tree_expr_nonnegative_warnv_p (arg0,
14662 strict_overflow_p);
14664 CASE_FLT_FN (BUILT_IN_ASINH):
14665 CASE_FLT_FN (BUILT_IN_ATAN):
14666 CASE_FLT_FN (BUILT_IN_ATANH):
14667 CASE_FLT_FN (BUILT_IN_CBRT):
14668 CASE_FLT_FN (BUILT_IN_CEIL):
14669 CASE_FLT_FN (BUILT_IN_ERF):
14670 CASE_FLT_FN (BUILT_IN_EXPM1):
14671 CASE_FLT_FN (BUILT_IN_FLOOR):
14672 CASE_FLT_FN (BUILT_IN_FMOD):
14673 CASE_FLT_FN (BUILT_IN_FREXP):
14674 CASE_FLT_FN (BUILT_IN_LCEIL):
14675 CASE_FLT_FN (BUILT_IN_LDEXP):
14676 CASE_FLT_FN (BUILT_IN_LFLOOR):
14677 CASE_FLT_FN (BUILT_IN_LLCEIL):
14678 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14679 CASE_FLT_FN (BUILT_IN_LLRINT):
14680 CASE_FLT_FN (BUILT_IN_LLROUND):
14681 CASE_FLT_FN (BUILT_IN_LRINT):
14682 CASE_FLT_FN (BUILT_IN_LROUND):
14683 CASE_FLT_FN (BUILT_IN_MODF):
14684 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14685 CASE_FLT_FN (BUILT_IN_RINT):
14686 CASE_FLT_FN (BUILT_IN_ROUND):
14687 CASE_FLT_FN (BUILT_IN_SCALB):
14688 CASE_FLT_FN (BUILT_IN_SCALBLN):
14689 CASE_FLT_FN (BUILT_IN_SCALBN):
14690 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14691 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14692 CASE_FLT_FN (BUILT_IN_SINH):
14693 CASE_FLT_FN (BUILT_IN_TANH):
14694 CASE_FLT_FN (BUILT_IN_TRUNC):
14695 /* True if the 1st argument is nonnegative. */
14696 return tree_expr_nonnegative_warnv_p (arg0,
14697 strict_overflow_p);
14699 CASE_FLT_FN (BUILT_IN_FMAX):
14700 /* True if the 1st OR 2nd arguments are nonnegative. */
14701 return (tree_expr_nonnegative_warnv_p (arg0,
14703 || (tree_expr_nonnegative_warnv_p (arg1,
14704 strict_overflow_p)));
14706 CASE_FLT_FN (BUILT_IN_FMIN):
14707 /* True if the 1st AND 2nd arguments are nonnegative. */
14708 return (tree_expr_nonnegative_warnv_p (arg0,
14710 && (tree_expr_nonnegative_warnv_p (arg1,
14711 strict_overflow_p)));
14713 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14714 /* True if the 2nd argument is nonnegative. */
14715 return tree_expr_nonnegative_warnv_p (arg1,
14716 strict_overflow_p);
14718 CASE_FLT_FN (BUILT_IN_POWI):
14719 /* True if the 1st argument is nonnegative or the second
14720 argument is an even integer. */
14721 if (TREE_CODE (arg1) == INTEGER_CST
14722 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14724 return tree_expr_nonnegative_warnv_p (arg0,
14725 strict_overflow_p);
14727 CASE_FLT_FN (BUILT_IN_POW):
14728 /* True if the 1st argument is nonnegative or the second
14729 argument is an even integer valued real. */
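/* E.g. (hypothetical arguments): pow (x, 2.0) is considered
   nonnegative for any x because the exponent is an even
   integer-valued real, whereas pow (x, 3.0) is only known
   nonnegative when x is.  */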
14730 if (TREE_CODE (arg1) == REAL_CST)
14735 c = TREE_REAL_CST (arg1);
14736 n = real_to_integer (&c);
14739 REAL_VALUE_TYPE cint;
14740 real_from_integer (&cint, VOIDmode, n,
14741 n < 0 ? -1 : 0, 0);
14742 if (real_identical (&c, &cint))
14746 return tree_expr_nonnegative_warnv_p (arg0,
14747 strict_overflow_p);
14752 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14756 /* Return true if T is known to be non-negative. If the return
14757 value is based on the assumption that signed overflow is undefined,
14758 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14759 *STRICT_OVERFLOW_P. */
14762 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14764 enum tree_code code = TREE_CODE (t);
14765 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14772 tree temp = TARGET_EXPR_SLOT (t);
14773 t = TARGET_EXPR_INITIAL (t);
14775 /* If the initializer is non-void, then it's a normal expression
14776 that will be assigned to the slot. */
14777 if (!VOID_TYPE_P (t))
14778 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14780 /* Otherwise, the initializer sets the slot in some way. One common
14781 way is an assignment statement at the end of the initializer. */
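/* For example (made-up GENERIC): in TARGET_EXPR <D.123, D.123 = a * a>,
   the last statement assigns the slot, so the question reduces to
   whether "a * a" is nonnegative.  */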
14784 if (TREE_CODE (t) == BIND_EXPR)
14785 t = expr_last (BIND_EXPR_BODY (t));
14786 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14787 || TREE_CODE (t) == TRY_CATCH_EXPR)
14788 t = expr_last (TREE_OPERAND (t, 0));
14789 else if (TREE_CODE (t) == STATEMENT_LIST)
14794 if (TREE_CODE (t) == MODIFY_EXPR
14795 && TREE_OPERAND (t, 0) == temp)
14796 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14797 strict_overflow_p);
14804 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14805 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14807 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14808 get_callee_fndecl (t),
14811 strict_overflow_p);
14813 case COMPOUND_EXPR:
14815 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14816 strict_overflow_p);
14818 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14819 strict_overflow_p);
14821 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14822 strict_overflow_p);
14825 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14829 /* We don't know sign of `t', so be conservative and return false. */
14833 /* Return true if T is known to be non-negative. If the return
14834 value is based on the assumption that signed overflow is undefined,
14835 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14836 *STRICT_OVERFLOW_P. */
14839 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14841 enum tree_code code;
14842 if (t == error_mark_node)
14845 code = TREE_CODE (t);
14846 switch (TREE_CODE_CLASS (code))
14849 case tcc_comparison:
14850 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14852 TREE_OPERAND (t, 0),
14853 TREE_OPERAND (t, 1),
14854 strict_overflow_p);
14857 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14859 TREE_OPERAND (t, 0),
14860 strict_overflow_p);
14863 case tcc_declaration:
14864 case tcc_reference:
14865 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14873 case TRUTH_AND_EXPR:
14874 case TRUTH_OR_EXPR:
14875 case TRUTH_XOR_EXPR:
14876 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14878 TREE_OPERAND (t, 0),
14879 TREE_OPERAND (t, 1),
14880 strict_overflow_p);
14881 case TRUTH_NOT_EXPR:
14882 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14884 TREE_OPERAND (t, 0),
14885 strict_overflow_p);
14892 case WITH_SIZE_EXPR:
14894 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14897 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14901 /* Return true if `t' is known to be non-negative. Handle warnings
14902 about undefined signed overflow. */
14905 tree_expr_nonnegative_p (tree t)
14907 bool ret, strict_overflow_p;
14909 strict_overflow_p = false;
14910 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14911 if (strict_overflow_p)
14912 fold_overflow_warning (("assuming signed overflow does not occur when "
14913 "determining that expression is always "
14915 WARN_STRICT_OVERFLOW_MISC);
14920 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14921 For floating point we further ensure that T is not denormal.
14922 Similar logic is present in nonzero_address in rtlanal.h.
14924 If the return value is based on the assumption that signed overflow
14925 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14926 change *STRICT_OVERFLOW_P. */
14929 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14930 bool *strict_overflow_p)
14935 return tree_expr_nonzero_warnv_p (op0,
14936 strict_overflow_p);
14940 tree inner_type = TREE_TYPE (op0);
14941 tree outer_type = type;
14943 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14944 && tree_expr_nonzero_warnv_p (op0,
14945 strict_overflow_p));
14949 case NON_LVALUE_EXPR:
14950 return tree_expr_nonzero_warnv_p (op0,
14951 strict_overflow_p);
14960 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14961 For floating point we further ensure that T is not denormal.
14962 Similar logic is present in nonzero_address in rtlanal.h.
14964 If the return value is based on the assumption that signed overflow
14965 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14966 change *STRICT_OVERFLOW_P. */
14969 tree_binary_nonzero_warnv_p (enum tree_code code,
14972 tree op1, bool *strict_overflow_p)
14974 bool sub_strict_overflow_p;
14977 case POINTER_PLUS_EXPR:
14979 if (TYPE_OVERFLOW_UNDEFINED (type))
14981 /* With the presence of negative values it is hard
14982 to say something. */
14983 sub_strict_overflow_p = false;
14984 if (!tree_expr_nonnegative_warnv_p (op0,
14985 &sub_strict_overflow_p)
14986 || !tree_expr_nonnegative_warnv_p (op1,
14987 &sub_strict_overflow_p))
14989 /* One of the operands must be positive and the other non-negative. */
14990 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14991 overflows, on a twos-complement machine the sum of two
14992 nonnegative numbers can never be zero. */
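/* Illustration (hypothetical operands): if x is known positive and y
   known nonnegative, "x + y" cannot be zero even if the addition
   wraps modulo 2^N, which is why *STRICT_OVERFLOW_P is left alone
   for this particular deduction.  */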
14993 return (tree_expr_nonzero_warnv_p (op0,
14995 || tree_expr_nonzero_warnv_p (op1,
14996 strict_overflow_p));
15001 if (TYPE_OVERFLOW_UNDEFINED (type))
15003 if (tree_expr_nonzero_warnv_p (op0,
15005 && tree_expr_nonzero_warnv_p (op1,
15006 strict_overflow_p))
15008 *strict_overflow_p = true;
15015 sub_strict_overflow_p = false;
15016 if (tree_expr_nonzero_warnv_p (op0,
15017 &sub_strict_overflow_p)
15018 && tree_expr_nonzero_warnv_p (op1,
15019 &sub_strict_overflow_p))
15021 if (sub_strict_overflow_p)
15022 *strict_overflow_p = true;
15027 sub_strict_overflow_p = false;
15028 if (tree_expr_nonzero_warnv_p (op0,
15029 &sub_strict_overflow_p))
15031 if (sub_strict_overflow_p)
15032 *strict_overflow_p = true;
15034 /* When both operands are nonzero, then MAX must be too. */
15035 if (tree_expr_nonzero_warnv_p (op1,
15036 strict_overflow_p))
15039 /* MAX where operand 0 is positive is positive. */
15040 return tree_expr_nonnegative_warnv_p (op0,
15041 strict_overflow_p);
15043 /* MAX where operand 1 is positive is positive. */
15044 else if (tree_expr_nonzero_warnv_p (op1,
15045 &sub_strict_overflow_p)
15046 && tree_expr_nonnegative_warnv_p (op1,
15047 &sub_strict_overflow_p))
15049 if (sub_strict_overflow_p)
15050 *strict_overflow_p = true;
15056 return (tree_expr_nonzero_warnv_p (op1,
15058 || tree_expr_nonzero_warnv_p (op0,
15059 strict_overflow_p));
15068 /* Return true when T is an address and is known to be nonzero.
15069 For floating point we further ensure that T is not denormal.
15070 Similar logic is present in nonzero_address in rtlanal.h.
15072 If the return value is based on the assumption that signed overflow
15073 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15074 change *STRICT_OVERFLOW_P. */
15077 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15079 bool sub_strict_overflow_p;
15080 switch (TREE_CODE (t))
15083 return !integer_zerop (t);
15087 tree base = TREE_OPERAND (t, 0);
15088 if (!DECL_P (base))
15089 base = get_base_address (base);
15094 /* Weak declarations may link to NULL. Other things may also be NULL
15095 so protect with -fdelete-null-pointer-checks; but not variables
15096 allocated on the stack. */
15098 && (flag_delete_null_pointer_checks
15099 || (DECL_CONTEXT (base)
15100 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15101 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15102 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15104 /* Constants are never weak. */
15105 if (CONSTANT_CLASS_P (base))
15112 sub_strict_overflow_p = false;
15113 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15114 &sub_strict_overflow_p)
15115 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15116 &sub_strict_overflow_p))
15118 if (sub_strict_overflow_p)
15119 *strict_overflow_p = true;
15130 /* Return true when T is an address and is known to be nonzero.
15131 For floating point we further ensure that T is not denormal.
15132 Similar logic is present in nonzero_address in rtlanal.h.
15134 If the return value is based on the assumption that signed overflow
15135 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15136 change *STRICT_OVERFLOW_P. */
15139 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15141 tree type = TREE_TYPE (t);
15142 enum tree_code code;
15144 /* Doing something useful for floating point would need more work. */
15145 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15148 code = TREE_CODE (t);
15149 switch (TREE_CODE_CLASS (code))
15152 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15153 strict_overflow_p);
15155 case tcc_comparison:
15156 return tree_binary_nonzero_warnv_p (code, type,
15157 TREE_OPERAND (t, 0),
15158 TREE_OPERAND (t, 1),
15159 strict_overflow_p);
15161 case tcc_declaration:
15162 case tcc_reference:
15163 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15171 case TRUTH_NOT_EXPR:
15172 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15173 strict_overflow_p);
15175 case TRUTH_AND_EXPR:
15176 case TRUTH_OR_EXPR:
15177 case TRUTH_XOR_EXPR:
15178 return tree_binary_nonzero_warnv_p (code, type,
15179 TREE_OPERAND (t, 0),
15180 TREE_OPERAND (t, 1),
15181 strict_overflow_p);
15188 case WITH_SIZE_EXPR:
15190 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15192 case COMPOUND_EXPR:
15195 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15196 strict_overflow_p);
15199 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15200 strict_overflow_p);
15203 return alloca_call_p (t);
15211 /* Return true when T is an address and is known to be nonzero.
15212 Handle warnings about undefined signed overflow. */
15215 tree_expr_nonzero_p (tree t)
15217 bool ret, strict_overflow_p;
15219 strict_overflow_p = false;
15220 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15221 if (strict_overflow_p)
15222 fold_overflow_warning (("assuming signed overflow does not occur when "
15223 "determining that expression is always "
15225 WARN_STRICT_OVERFLOW_MISC);
15229 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15230 attempt to fold the expression to a constant without modifying TYPE,
15233 If the expression could be simplified to a constant, then return
15234 the constant. If the expression would not be simplified to a
15235 constant, then return NULL_TREE. */
15238 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15240 tree tem = fold_binary (code, type, op0, op1);
15241 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15244 /* Given the components of a unary expression CODE, TYPE and OP0,
15245 attempt to fold the expression to a constant without modifying
15248 If the expression could be simplified to a constant, then return
15249 the constant. If the expression would not be simplified to a
15250 constant, then return NULL_TREE. */
15253 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15255 tree tem = fold_unary (code, type, op0);
15256 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15259 /* If EXP represents referencing an element in a constant string
15260 (either via pointer arithmetic or array indexing), return the
15261 tree representing the value accessed, otherwise return NULL. */
15264 fold_read_from_constant_string (tree exp)
15266 if ((TREE_CODE (exp) == INDIRECT_REF
15267 || TREE_CODE (exp) == ARRAY_REF)
15268 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15270 tree exp1 = TREE_OPERAND (exp, 0);
15273 location_t loc = EXPR_LOCATION (exp);
15275 if (TREE_CODE (exp) == INDIRECT_REF)
15276 string = string_constant (exp1, &index);
15279 tree low_bound = array_ref_low_bound (exp);
15280 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15282 /* Optimize the special-case of a zero lower bound.
15284 We convert the low_bound to sizetype to avoid some problems
15285 with constant folding. (E.g. suppose the lower bound is 1,
15286 and its mode is QI. Without the conversion, (ARRAY
15287 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15288 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15289 if (! integer_zerop (low_bound))
15290 index = size_diffop_loc (loc, index,
15291 fold_convert_loc (loc, sizetype, low_bound));
15297 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15298 && TREE_CODE (string) == STRING_CST
15299 && TREE_CODE (index) == INTEGER_CST
15300 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15301 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15303 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15304 return build_int_cst_type (TREE_TYPE (exp),
15305 (TREE_STRING_POINTER (string)
15306 [TREE_INT_CST_LOW (index)]));
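/* A typical hit for this routine (hypothetical input): folding the
   reference "abc"[1] yields the INTEGER_CST 'b', since the string is
   a STRING_CST, the index is constant and in range, and the element
   mode is a single byte.  */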
15311 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15312 an integer constant, real, or fixed-point constant.
15314 TYPE is the type of the result. */
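/* For instance (assumed 32-bit int): negating the INTEGER_CST
   -2147483648 wraps back to itself, and the computed overflow bit is
   what sets TREE_OVERFLOW on the result for signed types below.  */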
15317 fold_negate_const (tree arg0, tree type)
15319 tree t = NULL_TREE;
15321 switch (TREE_CODE (arg0))
15325 double_int val = tree_to_double_int (arg0);
15326 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15328 t = force_fit_type_double (type, val, 1,
15329 (overflow | TREE_OVERFLOW (arg0))
15330 && !TYPE_UNSIGNED (type));
15335 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15340 FIXED_VALUE_TYPE f;
15341 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15342 &(TREE_FIXED_CST (arg0)), NULL,
15343 TYPE_SATURATING (type));
15344 t = build_fixed (type, f);
15345 /* Propagate overflow flags. */
15346 if (overflow_p | TREE_OVERFLOW (arg0))
15347 TREE_OVERFLOW (t) = 1;
15352 gcc_unreachable ();
15358 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15359 an integer constant or real constant.
15361 TYPE is the type of the result. */
15364 fold_abs_const (tree arg0, tree type)
15366 tree t = NULL_TREE;
15368 switch (TREE_CODE (arg0))
15372 double_int val = tree_to_double_int (arg0);
15374 /* If the value is unsigned or non-negative, then the absolute value
15375 is the same as the ordinary value. */
15376 if (TYPE_UNSIGNED (type)
15377 || !double_int_negative_p (val))
15380 /* If the value is negative, then the absolute value is its negation.  */
15386 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15387 t = force_fit_type_double (type, val, -1,
15388 overflow | TREE_OVERFLOW (arg0));
15394 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15395 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15401 gcc_unreachable ();
15407 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15408 constant. TYPE is the type of the result. */
15411 fold_not_const (const_tree arg0, tree type)
15415 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15417 val = double_int_not (tree_to_double_int (arg0));
15418 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15421 /* Given CODE, a relational operator, the target type, TYPE and two
15422 constant operands OP0 and OP1, return the result of the
15423 relational operation. If the result is not a compile time
15424 constant, then return NULL_TREE. */
15427 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15429 int result, invert;
15431 /* From here on, the only cases we handle are when the result is
15432 known to be a constant. */
15434 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15436 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15437 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15439 /* Handle the cases where either operand is a NaN. */
15440 if (real_isnan (c0) || real_isnan (c1))
15450 case UNORDERED_EXPR:
15464 if (flag_trapping_math)
15470 gcc_unreachable ();
15473 return constant_boolean_node (result, type);
15476 return constant_boolean_node (real_compare (code, c0, c1), type);
15479 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15481 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15482 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15483 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15486 /* Handle equality/inequality of complex constants. */
15487 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15489 tree rcond = fold_relational_const (code, type,
15490 TREE_REALPART (op0),
15491 TREE_REALPART (op1));
15492 tree icond = fold_relational_const (code, type,
15493 TREE_IMAGPART (op0),
15494 TREE_IMAGPART (op1));
15495 if (code == EQ_EXPR)
15496 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15497 else if (code == NE_EXPR)
15498 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15503 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15505 To compute GT, swap the arguments and do LT.
15506 To compute GE, do LT and invert the result.
15507 To compute LE, swap the arguments, do LT and invert the result.
15508 To compute NE, do EQ and invert the result.
15510 Therefore, the code below must handle only EQ and LT. */
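/* Worked through with made-up constants: "5 >= 7" is computed as
   "5 < 7" (true) and then inverted to false, while "7 > 5" is
   computed by swapping the operands to give "5 < 7".  */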
15512 if (code == LE_EXPR || code == GT_EXPR)
15517 code = swap_tree_comparison (code);
15520 /* Note that it is safe to invert for real values here because we
15521 have already handled the one case where it matters. */
15524 if (code == NE_EXPR || code == GE_EXPR)
15527 code = invert_tree_comparison (code, false);
15530 /* Compute a result for LT or EQ if args permit;
15531 Otherwise return T. */
15532 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15534 if (code == EQ_EXPR)
15535 result = tree_int_cst_equal (op0, op1);
15536 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15537 result = INT_CST_LT_UNSIGNED (op0, op1);
15539 result = INT_CST_LT (op0, op1);
15546 return constant_boolean_node (result, type);
15549 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15550 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15554 fold_build_cleanup_point_expr (tree type, tree expr)
15556 /* If the expression does not have side effects then we don't have to wrap
15557 it with a cleanup point expression. */
15558 if (!TREE_SIDE_EFFECTS (expr))
15561 /* If the expression is a return, check to see if the expression inside the
15562 return has no side effects or the right hand side of the modify expression
15563 inside the return. If either has no side effects, we don't need to
15564 wrap the expression in a cleanup point expression. Note we don't check the
15565 left hand side of the modify because it should always be a return decl. */
15566 if (TREE_CODE (expr) == RETURN_EXPR)
15568 tree op = TREE_OPERAND (expr, 0);
15569 if (!op || !TREE_SIDE_EFFECTS (op))
15571 op = TREE_OPERAND (op, 1);
15572 if (!TREE_SIDE_EFFECTS (op))
15576 return build1 (CLEANUP_POINT_EXPR, type, expr);
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
	}
    }
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_low_cst (op01, 0);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi
		= tree_low_cst (part_width, 0) / BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }
  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
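
/* Summary of the simplifications above on C-level examples (the
   declarations are hypothetical and only meant as illustration):

     int a[4];           *(int *)&a        => a[0]
     _Complex double z;  ((double *)&z)[1] => __imag__ z
     int a[4];           ((int *)&a)[1]    => a[1]

   For the array cases the constant byte offset is divided by the element
   size with EXACT_DIV_EXPR; for the complex case it must equal the
   TYPE_SIZE_UNIT of the element type.  */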
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
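
/* Minimal usage sketch (hypothetical; VAR is any addressable decl):

     build_fold_indirect_ref_loc (loc, build_fold_addr_expr_loc (loc, var))

   folds back to VAR itself via the *&p case in fold_indirect_ref_1
   instead of building an INDIRECT_REF node.  */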
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
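
/* Illustrative effect (hypothetical trees): for an ignored "x + foo ()"
   only the second operand has side effects, so the loop above returns
   foo (); for "foo () + bar ()" both operands have side effects and the
   whole expression is returned unchanged.  */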
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  double_int val = tree_to_double_int (value);
	  bool overflow_p;

	  if ((val.low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val.low &= ~(divisor - 1);
	  val.low += divisor;
	  if (val.low == 0)
	    {
	      val.high++;
	      if (val.high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), val,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
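
/* Worked example for the power-of-two path above (the numbers are
   illustrative only): rounding 37 up to a multiple of 8 computes
   (37 & ~7) + 8 = 40, the same value the non-constant branch builds as
   (37 + 7) & -8 with PLUS_EXPR and BIT_AND_EXPR.  For a non-power-of-two
   divisor such as 12, the fallback uses CEIL_DIV_EXPR and MULT_EXPR:
   ceil (37 / 12) * 12 = 4 * 12 = 48.  */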
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
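
/* Worked example (illustrative numbers): rounding 37 down to a multiple
   of 8 is simply 37 & -8 = 32; for a divisor of 12 the fallback computes
   FLOOR_DIV_EXPR then MULT_EXPR: (37 / 12) * 12 = 3 * 12 = 36.  */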
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   in PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
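
/* Example (hypothetical declarations): for EXP == &s.f, where field F
   sits at a constant offset of 4 bytes within S, the core is &s,
   *PBITPOS is set to 32 and *POFFSET to NULL_TREE; for &a[i] the
   variable part of the offset comes back in *POFFSET instead.  */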
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
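
/* Example (hypothetical): for E1 == &a[3] and E2 == &a[1] with 4-byte
   elements, both cores are &a, the bit positions differ by 64, and *DIFF
   is set to 8; for addresses of unrelated objects operand_equal_p fails
   on the cores and false is returned.  */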
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip the copysign function call and return its first
	       argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);