/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
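
/* Illustrative sketch, not part of the original file: exercising
   OVERFLOW_SUM_SIGN.  The addition is done in the unsigned type so the
   wrap-around itself is well defined; e.g. for A == B == the most
   positive HOST_WIDE_INT, A and B agree in sign but SUM wraps negative,
   so the macro reports overflow.  */
static int ATTRIBUTE_UNUSED
example_add_overflows_p (HOST_WIDE_INT a, HOST_WIDE_INT b)
{
  HOST_WIDE_INT sum = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) a
                                       + (unsigned HOST_WIDE_INT) b);
  return OVERFLOW_SUM_SIGN (a, b, sum);
}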
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, which does
     the right thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;
/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
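
/* Usage sketch, not part of the original file: a caller that folds
   speculatively defers warnings and only keeps them if the folded
   result is actually used:

     fold_defer_overflow_warnings ();
     tem = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (tem != NULL_TREE, stmt, 0);

   A caller that discards the result instead calls
   fold_undefer_and_ignore_overflow_warnings.  */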
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
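
/* Illustration, not part of the original file: for a signed type the
   only constant the predicate above rejects is the most negative
   value, whose absolute value is not representable.  */
static bool ATTRIBUTE_UNUSED
example_int_min_negates_p (void)
{
  /* TYPE_MIN_VALUE of a signed type is its most negative constant,
     so this call returns false.  */
  return may_negate_without_overflow_p (TYPE_MIN_VALUE (integer_type_node));
}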
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
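
/* For example (illustration, not from the original file): the
   predicate approves -(a - b) when neither signed zeros nor
   sign-dependent rounding are honored, because the negation can be
   rewritten as b - a without materializing a NEGATE_EXPR; it rejects
   an INTEGER_CST equal to TYPE_MIN_VALUE of a type whose overflow is
   undefined, where negation itself would overflow.  */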
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    {
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      SET_EXPR_LOCATION (tem, loc);
    }
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
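
/* Worked example (not from the original file): splitting IN = x + 3
   with CODE == PLUS_EXPR stores the literal 3 in *LITP, leaves *CONP
   and *MINUS_LITP null, and returns x as the variable part; splitting
   IN = x - 3 stores 3 in *MINUS_LITP instead, because the literal was
   subtracted.  */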
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  tree tem;

  if (t1 == 0)
    return t2;
  if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
                          fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
          else
            tem = build2 (code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
          goto associate_trees_exit;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
                    fold_convert_loc (loc, type, t2));
      goto associate_trees_exit;
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
 associate_trees_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = -int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
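
/* Usage sketch, not part of the original file: folding 2 + 3 into a
   single INTEGER_CST of value 5.  With NOTRUNC zero, the result is
   forced to fit the precision of the type.  */
static tree ATTRIBUTE_UNUSED
example_fold_int_sum (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  return int_const_binop (PLUS_EXPR, two, three, 0);
}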
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fall through.  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2, notrunc),
                               const_binop (MULT_EXPR, i2, i2, notrunc),
                               notrunc);
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2, notrunc),
                               const_binop (MULT_EXPR, i1, i2, notrunc),
                               notrunc);
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2, notrunc),
                               const_binop (MULT_EXPR, r1, i2, notrunc),
                               notrunc);

              real = const_binop (code, t1, magsquared, notrunc);
              imag = const_binop (code, t2, magsquared, notrunc);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2, notrunc);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio,
                                                       notrunc),
                                          notrunc);
                  real = const_binop (MULT_EXPR, r1, ratio, notrunc);
                  real = const_binop (PLUS_EXPR, real, i1, notrunc);
                  real = const_binop (code, real, div, notrunc);

                  imag = const_binop (MULT_EXPR, i1, ratio, notrunc);
                  imag = const_binop (MINUS_EXPR, imag, r1, notrunc);
                  imag = const_binop (code, imag, div, notrunc);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2, notrunc);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio,
                                                       notrunc),
                                          notrunc);

                  real = const_binop (MULT_EXPR, i1, ratio, notrunc);
                  real = const_binop (PLUS_EXPR, real, r1, notrunc);
                  real = const_binop (code, real, div, notrunc);

                  imag = const_binop (MULT_EXPR, r1, ratio, notrunc);
                  imag = const_binop (MINUS_EXPR, i1, imag, notrunc);
                  imag = const_binop (code, imag, div, notrunc);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated
             as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2, notrunc);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }
      return build_vector (type, nreverse (list));
    }

  return NULL_TREE;
}
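
/* Usage sketch, not part of the original file: folding 1.0 + 1.0.
   The call returns a REAL_CST for 2.0, or NULL_TREE when folding is
   refused, e.g. under -frounding-math if the result were inexact.  */
static tree ATTRIBUTE_UNUSED
example_fold_real_sum (void)
{
  tree one = build_real (double_type_node, dconst1);
  return const_binop (PLUS_EXPR, one, one, 0);
}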
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
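
/* Usage sketch, not part of the original file: size_int builds
   sizetype constants, so both operands satisfy int_binop_types_match_p
   and the sum folds to 12 immediately.  */
static tree ATTRIBUTE_UNUSED
example_size_sum (void)
{
  return size_binop (PLUS_EXPR, size_int (4), size_int (8));
}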
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
1520 INTEGER_CST to another integer type. */
1523 fold_convert_const_int_from_int (tree type, const_tree arg1)
1527 /* Given an integer constant, make new constant with new type,
1528 appropriately sign-extended or truncated. */
1529 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
1530 TREE_INT_CST_HIGH (arg1),
1531 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1532 (TREE_INT_CST_HIGH (arg1) < 0
1533 && (TYPE_UNSIGNED (type)
1534 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1535 | TREE_OVERFLOW (arg1));
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val.low, val.high, -1,
                             overflow | TREE_OVERFLOW (arg1));
  return t;
}
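
/* Illustration, not part of the original file: FIX_TRUNC_EXPR of 3.9
   yields 3; out-of-range inputs saturate to TYPE_MIN_VALUE or
   TYPE_MAX_VALUE with TREE_OVERFLOW set, and NaNs map to zero, also
   with TREE_OVERFLOW set.  */
static tree ATTRIBUTE_UNUSED
example_fix_trunc (tree real_cst)
{
  return fold_convert_const_int_from_real (FIX_TRUNC_EXPR,
                                           integer_type_node, real_cst);
}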
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
                                      HOST_BITS_PER_DOUBLE_INT,
                                      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if any fractional bits are nonzero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp.low, temp.high, -1,
                             (double_int_negative_p (temp)
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold_build1_loc (loc, NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* Fall through.  */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      if (TREE_CODE (tem) == MODIFY_EXPR)
        goto fold_convert_exit;
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}
2101 non_lvalue_loc (location_t loc, tree x)
2103 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2108 if (! maybe_lvalue_p (x))
2110 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2111 SET_EXPR_LOCATION (x, loc);
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  protected_set_expr_location (x, loc);
  return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
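
/* For instance (illustration, not from the original file):

     invert_tree_comparison (LT_EXPR, false) == GE_EXPR
     invert_tree_comparison (LT_EXPR, true)  == UNGE_EXPR

   since with NaNs honored x < y and x >= y can both be false; and when
   both honor_nans and flag_trapping_math are set the function returns
   ERROR_MARK, because the inverted comparison could raise a different
   set of invalid-operand exceptions.  */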
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
2299 /* Return a tree for the comparison which is the combination of
2300 doing the AND or OR (depending on CODE) of the two operations LCODE
2301 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2302 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2303 if this makes the transformation invalid. */
2306 combine_comparisons (location_t loc,
2307 enum tree_code code, enum tree_code lcode,
2308 enum tree_code rcode, tree truth_type,
2309 tree ll_arg, tree lr_arg)
2311 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2312 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2313 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2318 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2319 compcode = lcompcode & rcompcode;
2322 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2323 compcode = lcompcode | rcompcode;
2332 /* Eliminate unordered comparisons, as well as LTGT and ORD
2333 which are not used unless the mode has NaNs. */
2334 compcode &= ~COMPCODE_UNORD;
2335 if (compcode == COMPCODE_LTGT)
2336 compcode = COMPCODE_NE;
2337 else if (compcode == COMPCODE_ORD)
2338 compcode = COMPCODE_TRUE;
2340 else if (flag_trapping_math)
2342 /* Check that the original operation and the optimized ones will trap
2343 under the same condition. */
2344 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2345 && (lcompcode != COMPCODE_EQ)
2346 && (lcompcode != COMPCODE_ORD);
2347 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2348 && (rcompcode != COMPCODE_EQ)
2349 && (rcompcode != COMPCODE_ORD);
2350 bool trap = (compcode & COMPCODE_UNORD) == 0
2351 && (compcode != COMPCODE_EQ)
2352 && (compcode != COMPCODE_ORD);
2354 /* In a short-circuited boolean expression the LHS might be
2355 such that the RHS, if evaluated, will never trap. For
2356 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2357 if neither x nor y is NaN. (This is a mixed blessing: for
2358 example, the expression above will never trap, hence
2359 optimizing it to x < y would be invalid). */
2360 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2361 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2364 /* If the comparison was short-circuited, and only the RHS
2365 trapped, we may now generate a spurious trap. */
2367 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2370 /* If we changed the conditions that cause a trap, we lose. */
2371 if ((ltrap || rtrap) != trap)
2375 if (compcode == COMPCODE_TRUE)
2376 return constant_boolean_node (true, truth_type);
2377 else if (compcode == COMPCODE_FALSE)
2378 return constant_boolean_node (false, truth_type);
2381 enum tree_code tcode;
2383 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2384 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
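/* Illustrative sketch (editorial addition, not part of GCC): the
   effect of combine_comparisons at the source level. For operands
   that cannot be NaN the fold is an exact equivalence; the loop
   below spot-checks (x < y) || (x == y) ==> (x <= y) over small
   integers. With NaNs and -ftrapping-math, the trap analysis above
   may veto the same rewrite. */
#if 0 /* example only */
int
main (void)
{
  int x, y;
  for (x = -2; x <= 2; x++)
    for (y = -2; y <= 2; y++)
      if (((x < y) || (x == y)) != (x <= y))
        return 1;
  return 0;
}
#endif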
2388 /* Return nonzero if two operands (typically of the same tree node)
2389 are necessarily equal. If either argument has side-effects this
2390 function returns zero. FLAGS modifies behavior as follows:
2392 If OEP_ONLY_CONST is set, only return nonzero for constants.
2393 This function tests whether the operands are indistinguishable;
2394 it does not test whether they are equal using C's == operation.
2395 The distinction is important for IEEE floating point, because
2396 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2397 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2399 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2400 even though it may hold multiple values during a function.
2401 This is because a GCC tree node guarantees that nothing else is
2402 executed between the evaluation of its "operands" (which may often
2403 be evaluated in arbitrary order). Hence if the operands themselves
2404 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2405 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2406 unset means assuming isochronic (or instantaneous) tree equivalence.
2407 Unless comparing arbitrary expression trees, such as from different
2408 statements, this flag can usually be left unset.
2410 If OEP_PURE_SAME is set, then pure functions with identical arguments
2411 are considered the same. It is used when the caller has other ways
2412 to ensure that global memory is unchanged in between. */
2415 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2417 /* If either is ERROR_MARK, they aren't equal. */
2418 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2419 || TREE_TYPE (arg0) == error_mark_node
2420 || TREE_TYPE (arg1) == error_mark_node)
2423 /* Similarly, if either does not have a type (like a released SSA name),
2424 they aren't equal. */
2425 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2428 /* Check equality of integer constants before bailing out due to
2429 precision differences. */
2430 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2431 return tree_int_cst_equal (arg0, arg1);
2433 /* If both types don't have the same signedness, then we can't consider
2434 them equal. We must check this before the STRIP_NOPS calls
2435 because they may change the signedness of the arguments. As pointers
2436 strictly don't have a signedness, require either two pointers or
2437 two non-pointers as well. */
2438 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2439 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2442 /* We cannot consider pointers to different address space equal. */
2443 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2444 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2445 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2448 /* If both types don't have the same precision, then it is not safe
2450 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2456 /* In case both args are comparisons but with different comparison
2457 code, try to swap the comparison operands of one arg to produce
2458 a match and compare that variant. */
2459 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2460 && COMPARISON_CLASS_P (arg0)
2461 && COMPARISON_CLASS_P (arg1))
2463 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2465 if (TREE_CODE (arg0) == swap_code)
2466 return operand_equal_p (TREE_OPERAND (arg0, 0),
2467 TREE_OPERAND (arg1, 1), flags)
2468 && operand_equal_p (TREE_OPERAND (arg0, 1),
2469 TREE_OPERAND (arg1, 0), flags);
2472 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2473 /* This is needed for conversions and for COMPONENT_REF.
2474 Might as well play it safe and always test this. */
2475 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2476 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2477 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2480 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2481 We don't care about side effects in that case because the SAVE_EXPR
2482 takes care of that for us. In all other cases, two expressions are
2483 equal if they have no side effects. If we have two identical
2484 expressions with side effects that should be treated the same due
2485 to the only side effects being identical SAVE_EXPR's, that will
2486 be detected in the recursive calls below. */
2487 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2488 && (TREE_CODE (arg0) == SAVE_EXPR
2489 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2492 /* Next handle constant cases, those for which we can return 1 even
2493 if ONLY_CONST is set. */
2494 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2495 switch (TREE_CODE (arg0))
2498 return tree_int_cst_equal (arg0, arg1);
2501 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2502 TREE_FIXED_CST (arg1));
2505 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2506 TREE_REAL_CST (arg1)))
2510 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2512 /* If we do not distinguish between signed and unsigned zero,
2513 consider them equal. */
2514 if (real_zerop (arg0) && real_zerop (arg1))
2523 v1 = TREE_VECTOR_CST_ELTS (arg0);
2524 v2 = TREE_VECTOR_CST_ELTS (arg1);
2527 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2530 v1 = TREE_CHAIN (v1);
2531 v2 = TREE_CHAIN (v2);
2538 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2540 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2544 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2545 && ! memcmp (TREE_STRING_POINTER (arg0),
2546 TREE_STRING_POINTER (arg1),
2547 TREE_STRING_LENGTH (arg0)));
2550 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2556 if (flags & OEP_ONLY_CONST)
2559 /* Define macros to test an operand from arg0 and arg1 for equality and a
2560 variant that allows null and views null as being different from any
2561 non-null value. In the latter case, if either is null, they both
2562 must be; otherwise, do the normal comparison. */
2563 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2564 TREE_OPERAND (arg1, N), flags)
2566 #define OP_SAME_WITH_NULL(N) \
2567 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2568 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2570 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2573 /* Two conversions are equal only if signedness and modes match. */
2574 switch (TREE_CODE (arg0))
2577 case FIX_TRUNC_EXPR:
2578 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2579 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2589 case tcc_comparison:
2591 if (OP_SAME (0) && OP_SAME (1))
2594 /* For commutative ops, allow the other order. */
2595 return (commutative_tree_code (TREE_CODE (arg0))
2596 && operand_equal_p (TREE_OPERAND (arg0, 0),
2597 TREE_OPERAND (arg1, 1), flags)
2598 && operand_equal_p (TREE_OPERAND (arg0, 1),
2599 TREE_OPERAND (arg1, 0), flags));
2602 /* If either of the pointer (or reference) expressions we are
2603 dereferencing contain a side effect, these cannot be equal. */
2604 if (TREE_SIDE_EFFECTS (arg0)
2605 || TREE_SIDE_EFFECTS (arg1))
2608 switch (TREE_CODE (arg0))
2611 case ALIGN_INDIRECT_REF:
2612 case MISALIGNED_INDIRECT_REF:
2618 case ARRAY_RANGE_REF:
2619 /* Operands 2 and 3 may be null.
2620 Compare the array index by value first if it is constant, as the
2621 indices may have different types but the same value here. */
2623 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2624 TREE_OPERAND (arg1, 1))
2626 && OP_SAME_WITH_NULL (2)
2627 && OP_SAME_WITH_NULL (3));
2630 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2631 may be NULL when we're called to compare MEM_EXPRs. */
2632 return OP_SAME_WITH_NULL (0)
2634 && OP_SAME_WITH_NULL (2);
2637 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2643 case tcc_expression:
2644 switch (TREE_CODE (arg0))
2647 case TRUTH_NOT_EXPR:
2650 case TRUTH_ANDIF_EXPR:
2651 case TRUTH_ORIF_EXPR:
2652 return OP_SAME (0) && OP_SAME (1);
2654 case TRUTH_AND_EXPR:
2656 case TRUTH_XOR_EXPR:
2657 if (OP_SAME (0) && OP_SAME (1))
2660 /* Otherwise take into account this is a commutative operation. */
2661 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2662 TREE_OPERAND (arg1, 1), flags)
2663 && operand_equal_p (TREE_OPERAND (arg0, 1),
2664 TREE_OPERAND (arg1, 0), flags));
2667 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2674 switch (TREE_CODE (arg0))
2677 /* If the CALL_EXPRs call different functions, then they
2678 clearly can not be equal. */
2679 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2684 unsigned int cef = call_expr_flags (arg0);
2685 if (flags & OEP_PURE_SAME)
2686 cef &= ECF_CONST | ECF_PURE;
2693 /* Now see if all the arguments are the same. */
2695 const_call_expr_arg_iterator iter0, iter1;
2697 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2698 a1 = first_const_call_expr_arg (arg1, &iter1);
2700 a0 = next_const_call_expr_arg (&iter0),
2701 a1 = next_const_call_expr_arg (&iter1))
2702 if (! operand_equal_p (a0, a1, flags))
2705 /* If we get here and both argument lists are exhausted
2706 then the CALL_EXPRs are equal. */
2707 return ! (a0 || a1);
2713 case tcc_declaration:
2714 /* Consider __builtin_sqrt equal to sqrt. */
2715 return (TREE_CODE (arg0) == FUNCTION_DECL
2716 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2717 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2718 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2725 #undef OP_SAME_WITH_NULL
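/* Illustrative sketch (editorial addition, not part of GCC): the two
   IEEE cases from the commentary above operand_equal_p. -0.0 and
   0.0 compare equal under C's == yet are distinguishable (their
   reciprocals differ in sign), while a NaN compares unequal to
   itself. Hence the function tests "indistinguishable", not
   "compares equal". */
#if 0 /* example only */
#include <math.h>
#include <assert.h>
int
main (void)
{
  double nz = -0.0, pz = 0.0, n = nan ("");
  assert (nz == pz);              /* equal under == ...       */
  assert (1.0 / nz != 1.0 / pz);  /* ... but distinguishable: */
                                  /* -inf vs +inf.            */
  assert (n != n);                /* NaN is unequal to itself. */
  return 0;
}
#endif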
2728 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2729 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2731 When in doubt, return 0. */
2734 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2736 int unsignedp1, unsignedpo;
2737 tree primarg0, primarg1, primother;
2738 unsigned int correct_width;
2740 if (operand_equal_p (arg0, arg1, 0))
2743 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2744 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2747 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2748 and see if the inner values are the same. This removes any
2749 signedness comparison, which doesn't matter here. */
2750 primarg0 = arg0, primarg1 = arg1;
2751 STRIP_NOPS (primarg0);
2752 STRIP_NOPS (primarg1);
2753 if (operand_equal_p (primarg0, primarg1, 0))
2756 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2757 actual comparison operand, ARG0.
2759 First throw away any conversions to wider types
2760 already present in the operands. */
2762 primarg1 = get_narrower (arg1, &unsignedp1);
2763 primother = get_narrower (other, &unsignedpo);
2765 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2766 if (unsignedp1 == unsignedpo
2767 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2768 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2770 tree type = TREE_TYPE (arg0);
2772 /* Make sure shorter operand is extended the right way
2773 to match the longer operand. */
2774 primarg1 = fold_convert (signed_or_unsigned_type_for
2775 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2777 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2784 /* See if ARG is an expression that is either a comparison or is performing
2785 arithmetic on comparisons. The comparisons must only be comparing
2786 two different values, which will be stored in *CVAL1 and *CVAL2; if
2787 they are nonzero it means that some operands have already been found.
2788 No variables may be used anywhere else in the expression except in the
2789 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2790 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2792 If this is true, return 1. Otherwise, return zero. */
2795 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2797 enum tree_code code = TREE_CODE (arg);
2798 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2800 /* We can handle some of the tcc_expression cases here. */
2801 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2803 else if (tclass == tcc_expression
2804 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2805 || code == COMPOUND_EXPR))
2806 tclass = tcc_binary;
2808 else if (tclass == tcc_expression && code == SAVE_EXPR
2809 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2811 /* If we've already found a CVAL1 or CVAL2, this expression is
2812 too complex to handle. */
2813 if (*cval1 || *cval2)
2823 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2826 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2827 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2828 cval1, cval2, save_p));
2833 case tcc_expression:
2834 if (code == COND_EXPR)
2835 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2836 cval1, cval2, save_p)
2837 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2838 cval1, cval2, save_p)
2839 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2840 cval1, cval2, save_p));
2843 case tcc_comparison:
2844 /* First see if we can handle the first operand, then the second. For
2845 the second operand, we know *CVAL1 can't be zero. It must be that
2846 one side of the comparison is each of the values; test for the
2847 case where this isn't true by failing if the two operands
2850 if (operand_equal_p (TREE_OPERAND (arg, 0),
2851 TREE_OPERAND (arg, 1), 0))
2855 *cval1 = TREE_OPERAND (arg, 0);
2856 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2858 else if (*cval2 == 0)
2859 *cval2 = TREE_OPERAND (arg, 0);
2860 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2865 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2867 else if (*cval2 == 0)
2868 *cval2 = TREE_OPERAND (arg, 1);
2869 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2881 /* ARG is a tree that is known to contain just arithmetic operations and
2882 comparisons. Evaluate the operations in the tree substituting NEW0 for
2883 any occurrence of OLD0 as an operand of a comparison and likewise for
2887 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2888 tree old1, tree new1)
2890 tree type = TREE_TYPE (arg);
2891 enum tree_code code = TREE_CODE (arg);
2892 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2894 /* We can handle some of the tcc_expression cases here. */
2895 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2897 else if (tclass == tcc_expression
2898 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2899 tclass = tcc_binary;
2904 return fold_build1_loc (loc, code, type,
2905 eval_subst (loc, TREE_OPERAND (arg, 0),
2906 old0, new0, old1, new1));
2909 return fold_build2_loc (loc, code, type,
2910 eval_subst (loc, TREE_OPERAND (arg, 0),
2911 old0, new0, old1, new1),
2912 eval_subst (loc, TREE_OPERAND (arg, 1),
2913 old0, new0, old1, new1));
2915 case tcc_expression:
2919 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2923 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2927 return fold_build3_loc (loc, code, type,
2928 eval_subst (loc, TREE_OPERAND (arg, 0),
2929 old0, new0, old1, new1),
2930 eval_subst (loc, TREE_OPERAND (arg, 1),
2931 old0, new0, old1, new1),
2932 eval_subst (loc, TREE_OPERAND (arg, 2),
2933 old0, new0, old1, new1));
2937 /* Fall through - ??? */
2939 case tcc_comparison:
2941 tree arg0 = TREE_OPERAND (arg, 0);
2942 tree arg1 = TREE_OPERAND (arg, 1);
2944 /* We need to check both for exact equality and tree equality. The
2945 former will be true if the operand has a side-effect. In that
2946 case, we know the operand occurred exactly once. */
2948 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2950 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2953 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2955 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2958 return fold_build2_loc (loc, code, type, arg0, arg1);
2966 /* Return a tree for the case when the result of an expression is RESULT
2967 converted to TYPE and OMITTED was previously an operand of the expression
2968 but is now not needed (e.g., we folded OMITTED * 0).
2970 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2971 the conversion of RESULT to TYPE. */
2974 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2976 tree t = fold_convert_loc (loc, type, result);
2978 /* If the resulting operand is an empty statement, just return the omitted
2979 statement cast to void. */
2980 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2982 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
2983 goto omit_one_operand_exit;
2986 if (TREE_SIDE_EFFECTS (omitted))
2988 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2989 goto omit_one_operand_exit;
2992 return non_lvalue_loc (loc, t);
2994 omit_one_operand_exit:
2995 protected_set_expr_location (t, loc);
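/* Illustrative sketch (editorial addition, not part of GCC): what
   omitting an operand means at the source level. Conceptually,
   folding f () * 0 must still evaluate f () for its side effects,
   so the result is the COMPOUND_EXPR (f (), 0) rather than a bare
   0. */
#if 0 /* example only */
static int calls;
static int f (void) { calls++; return 42; }
int
main (void)
{
  int r = (f (), 0);  /* the shape of the folded expression */
  return (r == 0 && calls == 1) ? 0 : 1;
}
#endif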
2999 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3002 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3005 tree t = fold_convert_loc (loc, type, result);
3007 /* If the resulting operand is an empty statement, just return the omitted
3008 statement cast to void. */
3009 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3011 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3012 goto pedantic_omit_one_operand_exit;
3015 if (TREE_SIDE_EFFECTS (omitted))
3017 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3018 goto pedantic_omit_one_operand_exit;
3021 return pedantic_non_lvalue_loc (loc, t);
3023 pedantic_omit_one_operand_exit:
3024 protected_set_expr_location (t, loc);
3028 /* Return a tree for the case when the result of an expression is RESULT
3029 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3030 of the expression but are now not needed.
3032 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3033 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3034 evaluated before OMITTED2. Otherwise, if neither has side effects,
3035 just do the conversion of RESULT to TYPE. */
3038 omit_two_operands_loc (location_t loc, tree type, tree result,
3039 tree omitted1, tree omitted2)
3041 tree t = fold_convert_loc (loc, type, result);
3043 if (TREE_SIDE_EFFECTS (omitted2))
3045 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3046 SET_EXPR_LOCATION (t, loc);
3048 if (TREE_SIDE_EFFECTS (omitted1))
3050 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3051 SET_EXPR_LOCATION (t, loc);
3054 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3058 /* Return a simplified tree node for the truth-negation of ARG. This
3059 never alters ARG itself. We assume that ARG is an operation that
3060 returns a truth value (0 or 1).
3062 FIXME: one would think we would fold the result, but it causes
3063 problems with the dominator optimizer. */
3066 fold_truth_not_expr (location_t loc, tree arg)
3068 tree t, type = TREE_TYPE (arg);
3069 enum tree_code code = TREE_CODE (arg);
3070 location_t loc1, loc2;
3072 /* If this is a comparison, we can simply invert it, except for
3073 floating-point non-equality comparisons, in which case we just
3074 enclose a TRUTH_NOT_EXPR around what we have. */
3076 if (TREE_CODE_CLASS (code) == tcc_comparison)
3078 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3079 if (FLOAT_TYPE_P (op_type)
3080 && flag_trapping_math
3081 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3082 && code != NE_EXPR && code != EQ_EXPR)
3085 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3086 if (code == ERROR_MARK)
3089 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3090 SET_EXPR_LOCATION (t, loc);
3097 return constant_boolean_node (integer_zerop (arg), type);
3099 case TRUTH_AND_EXPR:
3100 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3101 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3102 if (loc1 == UNKNOWN_LOCATION)
3104 if (loc2 == UNKNOWN_LOCATION)
3106 t = build2 (TRUTH_OR_EXPR, type,
3107 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3108 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3112 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3113 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3114 if (loc1 == UNKNOWN_LOCATION)
3116 if (loc2 == UNKNOWN_LOCATION)
3118 t = build2 (TRUTH_AND_EXPR, type,
3119 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3120 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3123 case TRUTH_XOR_EXPR:
3124 /* Here we can invert either operand. We invert the first operand
3125 unless the second operand is a TRUTH_NOT_EXPR in which case our
3126 result is the XOR of the first operand with the inside of the
3127 negation of the second operand. */
3129 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3130 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3131 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3133 t = build2 (TRUTH_XOR_EXPR, type,
3134 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3135 TREE_OPERAND (arg, 1));
3138 case TRUTH_ANDIF_EXPR:
3139 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3140 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3141 if (loc1 == UNKNOWN_LOCATION)
3143 if (loc2 == UNKNOWN_LOCATION)
3145 t = build2 (TRUTH_ORIF_EXPR, type,
3146 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3147 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3150 case TRUTH_ORIF_EXPR:
3151 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3152 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3153 if (loc1 == UNKNOWN_LOCATION)
3155 if (loc2 == UNKNOWN_LOCATION)
3157 t = build2 (TRUTH_ANDIF_EXPR, type,
3158 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3159 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3162 case TRUTH_NOT_EXPR:
3163 return TREE_OPERAND (arg, 0);
3167 tree arg1 = TREE_OPERAND (arg, 1);
3168 tree arg2 = TREE_OPERAND (arg, 2);
3170 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3171 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3172 if (loc1 == UNKNOWN_LOCATION)
3174 if (loc2 == UNKNOWN_LOCATION)
3177 /* A COND_EXPR may have a throw as one operand, which
3178 then has void type. Just leave void operands
3180 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3181 VOID_TYPE_P (TREE_TYPE (arg1))
3182 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3183 VOID_TYPE_P (TREE_TYPE (arg2))
3184 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3189 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3190 if (loc1 == UNKNOWN_LOCATION)
3192 t = build2 (COMPOUND_EXPR, type,
3193 TREE_OPERAND (arg, 0),
3194 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3197 case NON_LVALUE_EXPR:
3198 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3199 if (loc1 == UNKNOWN_LOCATION)
3201 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3204 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3206 t = build1 (TRUTH_NOT_EXPR, type, arg);
3210 /* ... fall through ... */
3213 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3214 if (loc1 == UNKNOWN_LOCATION)
3216 t = build1 (TREE_CODE (arg), type,
3217 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3221 if (!integer_onep (TREE_OPERAND (arg, 1)))
3223 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3227 t = build1 (TRUTH_NOT_EXPR, type, arg);
3230 case CLEANUP_POINT_EXPR:
3231 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3232 if (loc1 == UNKNOWN_LOCATION)
3234 t = build1 (CLEANUP_POINT_EXPR, type,
3235 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3244 SET_EXPR_LOCATION (t, loc);
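/* Illustrative sketch (editorial addition, not part of GCC): the
   recursive inversions above are De Morgan's laws plus comparison
   inversion. The loop below spot-checks !(a && b) == (!a || !b)
   over boolean values; the AND/OR cases in the switch build exactly
   this shape. */
#if 0 /* example only */
int
main (void)
{
  int a, b;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      if ((!(a && b)) != (!a || !b))
        return 1;
  return 0;
}
#endif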
3249 /* Return a simplified tree node for the truth-negation of ARG. This
3250 never alters ARG itself. We assume that ARG is an operation that
3251 returns a truth value (0 or 1).
3253 FIXME: one would think we would fold the result, but it causes
3254 problems with the dominator optimizer. */
3257 invert_truthvalue_loc (location_t loc, tree arg)
3261 if (TREE_CODE (arg) == ERROR_MARK)
3264 tem = fold_truth_not_expr (loc, arg);
3267 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3268 SET_EXPR_LOCATION (tem, loc);
3274 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3275 operands are another bit-wise operation with a common input. If so,
3276 distribute the bit operations to save an operation and possibly two if
3277 constants are involved. For example, convert
3278 (A | B) & (A | C) into A | (B & C)
3279 Further simplification will occur if B and C are constants.
3281 If this optimization cannot be done, 0 will be returned. */
3284 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3285 tree arg0, tree arg1)
3290 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3291 || TREE_CODE (arg0) == code
3292 || (TREE_CODE (arg0) != BIT_AND_EXPR
3293 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3296 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3298 common = TREE_OPERAND (arg0, 0);
3299 left = TREE_OPERAND (arg0, 1);
3300 right = TREE_OPERAND (arg1, 1);
3302 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3304 common = TREE_OPERAND (arg0, 0);
3305 left = TREE_OPERAND (arg0, 1);
3306 right = TREE_OPERAND (arg1, 0);
3308 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3310 common = TREE_OPERAND (arg0, 1);
3311 left = TREE_OPERAND (arg0, 0);
3312 right = TREE_OPERAND (arg1, 1);
3314 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3316 common = TREE_OPERAND (arg0, 1);
3317 left = TREE_OPERAND (arg0, 0);
3318 right = TREE_OPERAND (arg1, 0);
3323 common = fold_convert_loc (loc, type, common);
3324 left = fold_convert_loc (loc, type, left);
3325 right = fold_convert_loc (loc, type, right);
3326 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3327 fold_build2_loc (loc, code, type, left, right));
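/* Illustrative sketch (editorial addition, not part of GCC): the
   distribution performed above, checked exhaustively over small
   values. (A | B) & (A | C) == A | (B & C) by Boolean algebra; the
   rewrite saves one operation, and two when B and C are constants
   that then fold together. */
#if 0 /* example only */
int
main (void)
{
  unsigned a, b, c;
  for (a = 0; a < 8; a++)
    for (b = 0; b < 8; b++)
      for (c = 0; c < 8; c++)
        if (((a | b) & (a | c)) != (a | (b & c)))
          return 1;
  return 0;
}
#endif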
3330 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3331 with code CODE. This optimization is unsafe. */
3333 distribute_real_division (location_t loc, enum tree_code code, tree type,
3334 tree arg0, tree arg1)
3336 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3337 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3339 /* (A / C) +- (B / C) -> (A +- B) / C. */
3341 && operand_equal_p (TREE_OPERAND (arg0, 1),
3342 TREE_OPERAND (arg1, 1), 0))
3343 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3344 fold_build2_loc (loc, code, type,
3345 TREE_OPERAND (arg0, 0),
3346 TREE_OPERAND (arg1, 0)),
3347 TREE_OPERAND (arg0, 1));
3349 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3350 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3351 TREE_OPERAND (arg1, 0), 0)
3352 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3353 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3355 REAL_VALUE_TYPE r0, r1;
3356 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3357 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3359 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3361 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3362 real_arithmetic (&r0, code, &r0, &r1);
3363 return fold_build2_loc (loc, MULT_EXPR, type,
3364 TREE_OPERAND (arg0, 0),
3365 build_real (type, r0));
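/* Illustrative sketch (editorial addition, not part of GCC): why the
   fold above is unsafe. A / C and A * (1 / C) round differently,
   because 1 / C is itself rounded before the multiply. On strict
   IEEE double targets (e.g. x86-64 with SSE math) the program below
   typically prints 0; with x87 excess precision results may vary. */
#if 0 /* example only */
#include <stdio.h>
int
main (void)
{
  volatile double a = 7.0, c = 3.0;   /* volatile blocks folding */
  double direct = a / c;
  double rewritten = a * (1.0 / c);
  printf ("%d\n", direct == rewritten);
  return 0;
}
#endif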
3371 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3372 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3375 make_bit_field_ref (location_t loc, tree inner, tree type,
3376 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3378 tree result, bftype;
3382 tree size = TYPE_SIZE (TREE_TYPE (inner));
3383 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3384 || POINTER_TYPE_P (TREE_TYPE (inner)))
3385 && host_integerp (size, 0)
3386 && tree_low_cst (size, 0) == bitsize)
3387 return fold_convert_loc (loc, type, inner);
3391 if (TYPE_PRECISION (bftype) != bitsize
3392 || TYPE_UNSIGNED (bftype) == !unsignedp)
3393 bftype = build_nonstandard_integer_type (bitsize, 0);
3395 result = build3 (BIT_FIELD_REF, bftype, inner,
3396 size_int (bitsize), bitsize_int (bitpos));
3397 SET_EXPR_LOCATION (result, loc);
3400 result = fold_convert_loc (loc, type, result);
3405 /* Optimize a bit-field compare.
3407 There are two cases: First is a compare against a constant and the
3408 second is a comparison of two items where the fields are at the same
3409 bit position relative to the start of a chunk (byte, halfword, word)
3410 large enough to contain it. In these cases we can avoid the shift
3411 implicit in bitfield extractions.
3413 For constants, we emit a compare of the shifted constant with the
3414 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3415 compared. For two fields at the same position, we do the ANDs with the
3416 similar mask and compare the result of the ANDs.
3418 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3419 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3420 are the left and right operands of the comparison, respectively.
3422 If the optimization described above can be done, we return the resulting
3423 tree. Otherwise we return zero. */
3426 optimize_bit_field_compare (location_t loc, enum tree_code code,
3427 tree compare_type, tree lhs, tree rhs)
3429 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3430 tree type = TREE_TYPE (lhs);
3431 tree signed_type, unsigned_type;
3432 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3433 enum machine_mode lmode, rmode, nmode;
3434 int lunsignedp, runsignedp;
3435 int lvolatilep = 0, rvolatilep = 0;
3436 tree linner, rinner = NULL_TREE;
3440 /* Get all the information about the extractions being done. If the bit size
3441 is the same as the size of the underlying object, we aren't doing an
3442 extraction at all and so can do nothing. We also don't want to
3443 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3444 then will no longer be able to replace it. */
3445 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3446 &lunsignedp, &lvolatilep, false);
3447 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3448 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3453 /* If this is not a constant, we can only do something if bit positions,
3454 sizes, and signedness are the same. */
3455 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3456 &runsignedp, &rvolatilep, false);
3458 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3459 || lunsignedp != runsignedp || offset != 0
3460 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3464 /* See if we can find a mode to refer to this field. We should be able to,
3465 but fail if we can't. */
3467 && GET_MODE_BITSIZE (lmode) > 0
3468 && flag_strict_volatile_bitfields > 0)
3471 nmode = get_best_mode (lbitsize, lbitpos,
3472 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3473 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3474 TYPE_ALIGN (TREE_TYPE (rinner))),
3475 word_mode, lvolatilep || rvolatilep);
3476 if (nmode == VOIDmode)
3479 /* Set signed and unsigned types of the precision of this mode for the
3481 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3482 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3484 /* Compute the bit position and size for the new reference and our offset
3485 within it. If the new reference is the same size as the original, we
3486 won't optimize anything, so return zero. */
3487 nbitsize = GET_MODE_BITSIZE (nmode);
3488 nbitpos = lbitpos & ~ (nbitsize - 1);
3490 if (nbitsize == lbitsize)
3493 if (BYTES_BIG_ENDIAN)
3494 lbitpos = nbitsize - lbitsize - lbitpos;
3496 /* Make the mask to be used against the extracted field. */
3497 mask = build_int_cst_type (unsigned_type, -1);
3498 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3499 mask = const_binop (RSHIFT_EXPR, mask,
3500 size_int (nbitsize - lbitsize - lbitpos), 0);
3503 /* If not comparing with constant, just rework the comparison
3505 return fold_build2_loc (loc, code, compare_type,
3506 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3507 make_bit_field_ref (loc, linner,
3512 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3513 make_bit_field_ref (loc, rinner,
3519 /* Otherwise, we are handling the constant case. See if the constant is too
3520 big for the field. Warn and return a tree for 0 (false) if so. We do
3521 this not only for its own sake, but to avoid having to test for this
3522 error case below. If we didn't, we might generate wrong code.
3524 For unsigned fields, the constant shifted right by the field length should
3525 be all zero. For signed fields, the high-order bits should agree with
3530 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3531 fold_convert_loc (loc,
3532 unsigned_type, rhs),
3533 size_int (lbitsize), 0)))
3535 warning (0, "comparison is always %d due to width of bit-field",
3537 return constant_boolean_node (code == NE_EXPR, compare_type);
3542 tree tem = const_binop (RSHIFT_EXPR,
3543 fold_convert_loc (loc, signed_type, rhs),
3544 size_int (lbitsize - 1), 0);
3545 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3547 warning (0, "comparison is always %d due to width of bit-field",
3549 return constant_boolean_node (code == NE_EXPR, compare_type);
3553 /* Single-bit compares should always be against zero. */
3554 if (lbitsize == 1 && ! integer_zerop (rhs))
3556 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3557 rhs = build_int_cst (type, 0);
3560 /* Make a new bitfield reference, shift the constant over the
3561 appropriate number of bits and mask it with the computed mask
3562 (in case this was a signed field). If we changed it, make a new one. */
3563 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3566 TREE_SIDE_EFFECTS (lhs) = 1;
3567 TREE_THIS_VOLATILE (lhs) = 1;
3570 rhs = const_binop (BIT_AND_EXPR,
3571 const_binop (LSHIFT_EXPR,
3572 fold_convert_loc (loc, unsigned_type, rhs),
3573 size_int (lbitpos), 0),
3576 lhs = build2 (code, compare_type,
3577 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3579 SET_EXPR_LOCATION (lhs, loc);
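/* Illustrative sketch (editorial addition, not part of GCC): the
   shape of the constant case above. A bit-field compared against a
   constant can be tested by masking the containing word and
   comparing with the constant shifted into place, with no extract
   shift. The bit positions below assume the common little-endian
   layout, so they are illustrative only. */
#if 0 /* example only */
#include <string.h>
struct s { unsigned pad : 2; unsigned f : 3; unsigned rest : 27; };
int
main (void)
{
  struct s v = { 1, 5, 0 };
  unsigned word;
  memcpy (&word, &v, sizeof word);
  /* v.f == 5 becomes (word & (7u << 2)) == (5u << 2).  */
  return ((word & (7u << 2)) == (5u << 2)) == (v.f == 5) ? 0 : 1;
}
#endif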
3583 /* Subroutine for fold_truthop: decode a field reference.
3585 If EXP is a comparison reference, we return the innermost reference.
3587 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3588 set to the starting bit number.
3590 If the innermost field can be completely contained in a mode-sized
3591 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3593 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3594 otherwise it is not changed.
3596 *PUNSIGNEDP is set to the signedness of the field.
3598 *PMASK is set to the mask used. This is either contained in a
3599 BIT_AND_EXPR or derived from the width of the field.
3601 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3603 Return 0 if this is not a component reference or is one that we can't
3604 do anything with. */
3607 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3608 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3609 int *punsignedp, int *pvolatilep,
3610 tree *pmask, tree *pand_mask)
3612 tree outer_type = 0;
3614 tree mask, inner, offset;
3616 unsigned int precision;
3618 /* All the optimizations using this function assume integer fields.
3619 There are problems with FP fields since the type_for_size call
3620 below can fail for, e.g., XFmode. */
3621 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3624 /* We are interested in the bare arrangement of bits, so strip everything
3625 that doesn't affect the machine mode. However, record the type of the
3626 outermost expression if it may matter below. */
3627 if (CONVERT_EXPR_P (exp)
3628 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3629 outer_type = TREE_TYPE (exp);
3632 if (TREE_CODE (exp) == BIT_AND_EXPR)
3634 and_mask = TREE_OPERAND (exp, 1);
3635 exp = TREE_OPERAND (exp, 0);
3636 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3637 if (TREE_CODE (and_mask) != INTEGER_CST)
3641 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3642 punsignedp, pvolatilep, false);
3643 if ((inner == exp && and_mask == 0)
3644 || *pbitsize < 0 || offset != 0
3645 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3648 /* If the number of bits in the reference is the same as the bitsize of
3649 the outer type, then the outer type gives the signedness. Otherwise
3650 (in case of a small bitfield) the signedness is unchanged. */
3651 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3652 *punsignedp = TYPE_UNSIGNED (outer_type);
3654 /* Compute the mask to access the bitfield. */
3655 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3656 precision = TYPE_PRECISION (unsigned_type);
3658 mask = build_int_cst_type (unsigned_type, -1);
3660 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3661 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3663 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3665 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3666 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3669 *pand_mask = and_mask;
3673 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3677 all_ones_mask_p (const_tree mask, int size)
3679 tree type = TREE_TYPE (mask);
3680 unsigned int precision = TYPE_PRECISION (type);
3683 tmask = build_int_cst_type (signed_type_for (type), -1);
3686 tree_int_cst_equal (mask,
3687 const_binop (RSHIFT_EXPR,
3688 const_binop (LSHIFT_EXPR, tmask,
3689 size_int (precision - size),
3691 size_int (precision - size), 0));
3694 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3695 represents the sign bit of EXP's type. If EXP represents a sign
3696 or zero extension, also test VAL against the unextended type.
3697 The return value is the (sub)expression whose sign bit is VAL,
3698 or NULL_TREE otherwise. */
3701 sign_bit_p (tree exp, const_tree val)
3703 unsigned HOST_WIDE_INT mask_lo, lo;
3704 HOST_WIDE_INT mask_hi, hi;
3708 /* Tree EXP must have an integral type. */
3709 t = TREE_TYPE (exp);
3710 if (! INTEGRAL_TYPE_P (t))
3713 /* Tree VAL must be an integer constant. */
3714 if (TREE_CODE (val) != INTEGER_CST
3715 || TREE_OVERFLOW (val))
3718 width = TYPE_PRECISION (t);
3719 if (width > HOST_BITS_PER_WIDE_INT)
3721 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3724 mask_hi = ((unsigned HOST_WIDE_INT) -1
3725 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3731 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3734 mask_lo = ((unsigned HOST_WIDE_INT) -1
3735 >> (HOST_BITS_PER_WIDE_INT - width));
3738 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3739 treat VAL as if it were unsigned. */
3740 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3741 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3744 /* Handle extension from a narrower type. */
3745 if (TREE_CODE (exp) == NOP_EXPR
3746 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3747 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3752 /* Subroutine for fold_truthop: determine if an operand is simple enough
3753 to be evaluated unconditionally. */
3756 simple_operand_p (const_tree exp)
3758 /* Strip any conversions that don't change the machine mode. */
3761 return (CONSTANT_CLASS_P (exp)
3762 || TREE_CODE (exp) == SSA_NAME
3764 && ! TREE_ADDRESSABLE (exp)
3765 && ! TREE_THIS_VOLATILE (exp)
3766 && ! DECL_NONLOCAL (exp)
3767 /* Don't regard global variables as simple. They may be
3768 allocated in ways unknown to the compiler (shared memory,
3769 #pragma weak, etc). */
3770 && ! TREE_PUBLIC (exp)
3771 && ! DECL_EXTERNAL (exp)
3772 /* Loading a static variable is unduly expensive, but global
3773 registers aren't expensive. */
3774 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3777 /* The following functions are subroutines to fold_range_test and allow it to
3778 try to change a logical combination of comparisons into a range test.
3781 X == 2 || X == 3 || X == 4 || X == 5
3782 and
3783 X >= 2 && X <= 5
3784 are converted to
3785 (unsigned) (X - 2) <= 3
3787 We describe each set of comparisons as being either inside or outside
3788 a range, using a variable named like IN_P, and then describe the
3789 range with a lower and upper bound. If one of the bounds is omitted,
3790 it represents either the highest or lowest value of the type.
3792 In the comments below, we represent a range by two numbers in brackets
3793 preceded by a "+" to designate being inside that range, or a "-" to
3794 designate being outside that range, so the condition can be inverted by
3795 flipping the prefix. An omitted bound is represented by a "-". For
3796 example, "- [-, 10]" means being outside the range starting at the lowest
3797 possible value and ending at 10, in other words, being greater than 10.
3798 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3801 We set up things so that the missing bounds are handled in a consistent
3802 manner so neither a missing bound nor "true" and "false" need to be
3803 handled using a special case. */
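/* Illustrative sketch (editorial addition, not part of GCC): the
   canonical range-test fold described above, checked exhaustively
   over a window of values. */
#if 0 /* example only */
int
main (void)
{
  int x;
  for (x = -100; x <= 100; x++)
    {
      int chain = (x == 2 || x == 3 || x == 4 || x == 5);
      int range = ((unsigned) (x - 2) <= 3);
      if (chain != range)
        return 1;
    }
  return 0;
}
#endif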
3805 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3806 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3807 and UPPER1_P are nonzero if the respective argument is an upper bound
3808 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3809 must be specified for a comparison. ARG1 will be converted to ARG0's
3810 type if both are specified. */
3813 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3814 tree arg1, int upper1_p)
3820 /* If neither arg represents infinity, do the normal operation.
3821 Else, if not a comparison, return infinity. Else handle the special
3822 comparison rules. Note that most of the cases below won't occur, but
3823 are handled for consistency. */
3825 if (arg0 != 0 && arg1 != 0)
3827 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3828 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3830 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3833 if (TREE_CODE_CLASS (code) != tcc_comparison)
3836 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3837 for neither. In real maths, we cannot assume open ended ranges are
3838 the same. But, this is computer arithmetic, where numbers are finite.
3839 We can therefore make the transformation of any unbounded range with
3840 the value Z, Z being greater than any representable number. This permits
3841 us to treat unbounded ranges as equal. */
3842 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3843 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3847 result = sgn0 == sgn1;
3850 result = sgn0 != sgn1;
3853 result = sgn0 < sgn1;
3856 result = sgn0 <= sgn1;
3859 result = sgn0 > sgn1;
3862 result = sgn0 >= sgn1;
3868 return constant_boolean_node (result, type);
3871 /* Given EXP, a logical expression, set the range it is testing into
3872 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3873 actually being tested. *PLOW and *PHIGH will be made of the same
3874 type as the returned expression. If EXP is not a comparison, we
3875 will most likely not be returning a useful value and range. Set
3876 *STRICT_OVERFLOW_P to true if the return value is only valid
3877 because signed overflow is undefined; otherwise, do not change
3878 *STRICT_OVERFLOW_P. */
3881 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3882 bool *strict_overflow_p)
3884 enum tree_code code;
3885 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3886 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3888 tree low, high, n_low, n_high;
3889 location_t loc = EXPR_LOCATION (exp);
3891 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3892 and see if we can refine the range. Some of the cases below may not
3893 happen, but it doesn't seem worth worrying about this. We "continue"
3894 the outer loop when we've changed something; otherwise we "break"
3895 the switch, which will "break" the while. */
3898 low = high = build_int_cst (TREE_TYPE (exp), 0);
3902 code = TREE_CODE (exp);
3903 exp_type = TREE_TYPE (exp);
3905 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3907 if (TREE_OPERAND_LENGTH (exp) > 0)
3908 arg0 = TREE_OPERAND (exp, 0);
3909 if (TREE_CODE_CLASS (code) == tcc_comparison
3910 || TREE_CODE_CLASS (code) == tcc_unary
3911 || TREE_CODE_CLASS (code) == tcc_binary)
3912 arg0_type = TREE_TYPE (arg0);
3913 if (TREE_CODE_CLASS (code) == tcc_binary
3914 || TREE_CODE_CLASS (code) == tcc_comparison
3915 || (TREE_CODE_CLASS (code) == tcc_expression
3916 && TREE_OPERAND_LENGTH (exp) > 1))
3917 arg1 = TREE_OPERAND (exp, 1);
3922 case TRUTH_NOT_EXPR:
3923 in_p = ! in_p, exp = arg0;
3926 case EQ_EXPR: case NE_EXPR:
3927 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3928 /* We can only do something if the range is testing for zero
3929 and if the second operand is an integer constant. Note that
3930 saying something is "in" the range we make is done by
3931 complementing IN_P since it will be set in the initial case of
3932 being not equal to zero; "out" is leaving it alone. */
3933 if (low == 0 || high == 0
3934 || ! integer_zerop (low) || ! integer_zerop (high)
3935 || TREE_CODE (arg1) != INTEGER_CST)
3940 case NE_EXPR: /* - [c, c] */
3943 case EQ_EXPR: /* + [c, c] */
3944 in_p = ! in_p, low = high = arg1;
3946 case GT_EXPR: /* - [-, c] */
3947 low = 0, high = arg1;
3949 case GE_EXPR: /* + [c, -] */
3950 in_p = ! in_p, low = arg1, high = 0;
3952 case LT_EXPR: /* - [c, -] */
3953 low = arg1, high = 0;
3955 case LE_EXPR: /* + [-, c] */
3956 in_p = ! in_p, low = 0, high = arg1;
3962 /* If this is an unsigned comparison, we also know that EXP is
3963 greater than or equal to zero. We base the range tests we make
3964 on that fact, so we record it here so we can parse existing
3965 range tests. We test arg0_type since often the return type
3966 of, e.g. EQ_EXPR, is boolean. */
3967 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3969 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3971 build_int_cst (arg0_type, 0),
3975 in_p = n_in_p, low = n_low, high = n_high;
3977 /* If the high bound is missing, but we have a nonzero low
3978 bound, reverse the range so it goes from zero to the low bound
3980 if (high == 0 && low && ! integer_zerop (low))
3983 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3984 integer_one_node, 0);
3985 low = build_int_cst (arg0_type, 0);
3993 /* (-x) IN [a,b] -> x in [-b, -a] */
3994 n_low = range_binop (MINUS_EXPR, exp_type,
3995 build_int_cst (exp_type, 0),
3997 n_high = range_binop (MINUS_EXPR, exp_type,
3998 build_int_cst (exp_type, 0),
4000 low = n_low, high = n_high;
4006 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4007 build_int_cst (exp_type, 1));
4008 SET_EXPR_LOCATION (exp, loc);
4011 case PLUS_EXPR: case MINUS_EXPR:
4012 if (TREE_CODE (arg1) != INTEGER_CST)
4015 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4016 move a constant to the other side. */
4017 if (!TYPE_UNSIGNED (arg0_type)
4018 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4021 /* If EXP is signed, any overflow in the computation is undefined,
4022 so we don't worry about it so long as our computations on
4023 the bounds don't overflow. For unsigned, overflow is defined
4024 and this is exactly the right thing. */
4025 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4026 arg0_type, low, 0, arg1, 0);
4027 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4028 arg0_type, high, 1, arg1, 0);
4029 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4030 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4033 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4034 *strict_overflow_p = true;
4036 /* Check for an unsigned range which has wrapped around the maximum
4037 value thus making n_high < n_low, and normalize it. */
4038 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4040 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4041 integer_one_node, 0);
4042 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4043 integer_one_node, 0);
4045 /* If the range is of the form +/- [ x+1, x ], we won't
4046 be able to normalize it. But then, it represents the
4047 whole range or the empty set, so make it
4049 if (tree_int_cst_equal (n_low, low)
4050 && tree_int_cst_equal (n_high, high))
4056 low = n_low, high = n_high;
4061 CASE_CONVERT: case NON_LVALUE_EXPR:
4062 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4065 if (! INTEGRAL_TYPE_P (arg0_type)
4066 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4067 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4070 n_low = low, n_high = high;
4073 n_low = fold_convert_loc (loc, arg0_type, n_low);
4076 n_high = fold_convert_loc (loc, arg0_type, n_high);
4079 /* If we're converting arg0 from an unsigned type, to exp,
4080 a signed type, we will be doing the comparison as unsigned.
4081 The tests above have already verified that LOW and HIGH
4084 So we have to ensure that we will handle large unsigned
4085 values the same way that the current signed bounds treat
4088 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4092 /* For fixed-point modes, we need to pass the saturating flag
4093 as the 2nd parameter. */
4094 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4095 equiv_type = lang_hooks.types.type_for_mode
4096 (TYPE_MODE (arg0_type),
4097 TYPE_SATURATING (arg0_type));
4099 equiv_type = lang_hooks.types.type_for_mode
4100 (TYPE_MODE (arg0_type), 1);
4102 /* A range without an upper bound is, naturally, unbounded.
4103 Since convert would have cropped a very large value, use
4104 the max value for the destination type. */
4106 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4107 : TYPE_MAX_VALUE (arg0_type);
4109 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4110 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4111 fold_convert_loc (loc, arg0_type,
4113 build_int_cst (arg0_type, 1));
4115 /* If the low bound is specified, "and" the range with the
4116 range for which the original unsigned value will be
4120 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4121 1, n_low, n_high, 1,
4122 fold_convert_loc (loc, arg0_type,
4127 in_p = (n_in_p == in_p);
4131 /* Otherwise, "or" the range with the range of the input
4132 that will be interpreted as negative. */
4133 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4134 0, n_low, n_high, 1,
4135 fold_convert_loc (loc, arg0_type,
4140 in_p = (in_p != n_in_p);
4145 low = n_low, high = n_high;
4155 /* If EXP is a constant, we can evaluate whether this is true or false. */
4156 if (TREE_CODE (exp) == INTEGER_CST)
4158 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4160 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4166 *pin_p = in_p, *plow = low, *phigh = high;
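/* Illustrative sketch (editorial addition, not part of GCC): the
   PLUS_EXPR case above moves the constant into the bounds. With
   unsigned arithmetic wraparound is well defined, so for an unsigned
   char x the test x + 10 <= 5 is the wrapped range x in [246, 251];
   the n_high < n_low normalization handles exactly this. */
#if 0 /* example only */
int
main (void)
{
  unsigned i;
  for (i = 0; i < 256; i++)
    {
      unsigned char x = (unsigned char) i;
      int orig = (unsigned char) (x + 10) <= 5;
      int range = (x >= 246 && x <= 251);
      if (orig != range)
        return 1;
    }
  return 0;
}
#endif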
4170 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4171 type, TYPE, return an expression to test if EXP is in (or out of, depending
4172 on IN_P) the range. Return 0 if the test couldn't be created. */
4175 build_range_check (location_t loc, tree type, tree exp, int in_p,
4176 tree low, tree high)
4178 tree etype = TREE_TYPE (exp), value;
4180 #ifdef HAVE_canonicalize_funcptr_for_compare
4181 /* Disable this optimization for function pointer expressions
4182 on targets that require function pointer canonicalization. */
4183 if (HAVE_canonicalize_funcptr_for_compare
4184 && TREE_CODE (etype) == POINTER_TYPE
4185 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4191 value = build_range_check (loc, type, exp, 1, low, high);
4193 return invert_truthvalue_loc (loc, value);
4198 if (low == 0 && high == 0)
4199 return build_int_cst (type, 1);
4202 return fold_build2_loc (loc, LE_EXPR, type, exp,
4203 fold_convert_loc (loc, etype, high));
4206 return fold_build2_loc (loc, GE_EXPR, type, exp,
4207 fold_convert_loc (loc, etype, low));
4209 if (operand_equal_p (low, high, 0))
4210 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4211 fold_convert_loc (loc, etype, low));
4213 if (integer_zerop (low))
4215 if (! TYPE_UNSIGNED (etype))
4217 etype = unsigned_type_for (etype);
4218 high = fold_convert_loc (loc, etype, high);
4219 exp = fold_convert_loc (loc, etype, exp);
4221 return build_range_check (loc, type, exp, 1, 0, high);
4224 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4225 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4227 unsigned HOST_WIDE_INT lo;
4231 prec = TYPE_PRECISION (etype);
4232 if (prec <= HOST_BITS_PER_WIDE_INT)
4235 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4239 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4240 lo = (unsigned HOST_WIDE_INT) -1;
4243 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4245 if (TYPE_UNSIGNED (etype))
4247 tree signed_etype = signed_type_for (etype);
4248 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4250 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4252 etype = signed_etype;
4253 exp = fold_convert_loc (loc, etype, exp);
4255 return fold_build2_loc (loc, GT_EXPR, type, exp,
4256 build_int_cst (etype, 0));
4260 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4261 This requires wrap-around arithmetic for the type of the expression.
4262 First make sure that arithmetic in this type is valid, then make sure
4263 that it wraps around. */
4264 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4265 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4266 TYPE_UNSIGNED (etype));
4268 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4270 tree utype, minv, maxv;
4272 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4273 for the type in question, as we rely on this here. */
4274 utype = unsigned_type_for (etype);
4275 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4276 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4277 integer_one_node, 1);
4278 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4280 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4287 high = fold_convert_loc (loc, etype, high);
4288 low = fold_convert_loc (loc, etype, low);
4289 exp = fold_convert_loc (loc, etype, exp);
4291 value = const_binop (MINUS_EXPR, high, low, 0);
4294 if (POINTER_TYPE_P (etype))
4296 if (value != 0 && !TREE_OVERFLOW (value))
4298 low = fold_convert_loc (loc, sizetype, low);
4299 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4300 return build_range_check (loc, type,
4301 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4303 1, build_int_cst (etype, 0), value);
4308 if (value != 0 && !TREE_OVERFLOW (value))
4309 return build_range_check (loc, type,
4310 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4311 1, build_int_cst (etype, 0), value);
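/* Illustrative sketch (editorial addition, not part of GCC): the
   (c >= 1) && (c <= 127) ==> (signed char) c > 0 special case noted
   above. On the usual two's-complement targets, the bytes 1..127
   are exactly those whose signed reinterpretation is positive. */
#if 0 /* example only */
int
main (void)
{
  unsigned i;
  for (i = 0; i < 256; i++)
    {
      unsigned char c = (unsigned char) i;
      int pair = (c >= 1 && c <= 127);
      int fold = ((signed char) c > 0);
      if (pair != fold)
        return 1;
    }
  return 0;
}
#endif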
4316 /* Return the predecessor of VAL in its type, handling the infinite case. */
4319 range_predecessor (tree val)
4321 tree type = TREE_TYPE (val);
4323 if (INTEGRAL_TYPE_P (type)
4324 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4327 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4330 /* Return the successor of VAL in its type, handling the infinite case. */
4333 range_successor (tree val)
4335 tree type = TREE_TYPE (val);
4337 if (INTEGRAL_TYPE_P (type)
4338 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4341 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4344 /* Given two ranges, see if we can merge them into one. Return 1 if we
4345 can, 0 if we can't. Set the output range into the specified parameters. */
4348 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4349 tree high0, int in1_p, tree low1, tree high1)
4357 int lowequal = ((low0 == 0 && low1 == 0)
4358 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4359 low0, 0, low1, 0)));
4360 int highequal = ((high0 == 0 && high1 == 0)
4361 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4362 high0, 1, high1, 1)));
4364 /* Make range 0 be the range that starts first, or ends last if they
4365 start at the same value. Swap them if it isn't. */
4366 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4369 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4370 high1, 1, high0, 1))))
4372 temp = in0_p, in0_p = in1_p, in1_p = temp;
4373 tem = low0, low0 = low1, low1 = tem;
4374 tem = high0, high0 = high1, high1 = tem;
4377 /* Now flag two cases, whether the ranges are disjoint or whether the
4378 second range is totally subsumed in the first. Note that the tests
4379 below are simplified by the ones above. */
4380 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4381 high0, 1, low1, 0));
4382 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4383 high1, 1, high0, 1));
4385 /* We now have four cases, depending on whether we are including or
4386 excluding the two ranges. */
4389 /* If they don't overlap, the result is false. If the second range
4390 is a subset it is the result. Otherwise, the range is from the start
4391 of the second to the end of the first. */
4393 in_p = 0, low = high = 0;
4395 in_p = 1, low = low1, high = high1;
4397 in_p = 1, low = low1, high = high0;
4400 else if (in0_p && ! in1_p)
4402 /* If they don't overlap, the result is the first range. If they are
4403 equal, the result is false. If the second range is a subset of the
4404 first, and the ranges begin at the same place, we go from just after
4405 the end of the second range to the end of the first. If the second
4406 range is not a subset of the first, or if it is a subset and both
4407 ranges end at the same place, the range starts at the start of the
4408 first range and ends just before the second range.
4409 Otherwise, we can't describe this as a single range. */
4411 in_p = 1, low = low0, high = high0;
4412 else if (lowequal && highequal)
4413 in_p = 0, low = high = 0;
4414 else if (subset && lowequal)
4416 low = range_successor (high1);
4421 /* We are in the weird situation where high0 > high1 but
4422 high1 has no successor. Punt. */
4426 else if (! subset || highequal)
4429 high = range_predecessor (low1);
4433 /* low0 < low1 but low1 has no predecessor. Punt. */
4441 else if (! in0_p && in1_p)
4443 /* If they don't overlap, the result is the second range. If the second
4444 is a subset of the first, the result is false. Otherwise,
4445 the range starts just after the first range and ends at the
4446 end of the second. */
4448 in_p = 1, low = low1, high = high1;
4449 else if (subset || highequal)
4450 in_p = 0, low = high = 0;
4453 low = range_successor (high0);
4458 /* high1 > high0 but high0 has no successor. Punt. */
4466 /* The case where we are excluding both ranges. Here the complex case
4467 is if they don't overlap. In that case, the only time we have a
4468 range is if they are adjacent. If the second is a subset of the
4469 first, the result is the first. Otherwise, the range to exclude
4470 starts at the beginning of the first range and ends at the end of the
4471 second. */
4472 if (no_overlap)
4474 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4475 range_successor (high0),
4476 1, low1, 0)))
4477 in_p = 0, low = low0, high = high1;
4480 /* Canonicalize - [min, x] into - [-, x]. */
4481 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4482 switch (TREE_CODE (TREE_TYPE (low0)))
4485 if (TYPE_PRECISION (TREE_TYPE (low0))
4486 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4490 if (tree_int_cst_equal (low0,
4491 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4495 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4496 && integer_zerop (low0))
4503 /* Canonicalize - [x, max] into - [x, -]. */
4504 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4505 switch (TREE_CODE (TREE_TYPE (high1)))
4508 if (TYPE_PRECISION (TREE_TYPE (high1))
4509 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4513 if (tree_int_cst_equal (high1,
4514 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4518 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4519 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4521 integer_one_node, 1)))
4528 /* The ranges might also be adjacent between the maximum and
4529 minimum values of the given type. For
4530 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4531 return + [x + 1, y - 1]. */
4532 if (low0 == 0 && high1 == 0)
4534 low = range_successor (high0);
4535 high = range_predecessor (low1);
4536 if (low == 0 || high == 0)
4546 in_p = 0, low = low0, high = high0;
4548 in_p = 0, low = low0, high = high1;
4551 *pin_p = in_p, *plow = low, *phigh = high;
4552 return 1;
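/* [Editorial sketch -- not part of fold-const.c.  A hypothetical example
   of what merge_ranges enables: two adjacent "in" ranges ['0','4'] and
   ['5','9'] merge into the single range ['0','9'], so the whole test can
   later become one comparison via build_range_check above.] */
static int
adjacent_ranges_sketch (int c)
{
  /* Folds as if written c >= '0' && c <= '9'.  */
  return (c >= '0' && c <= '4') || (c >= '5' && c <= '9');
}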
4556 /* Subroutine of fold, looking inside expressions of the form
4557 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4558 of the COND_EXPR. This function is also used to optimize
4559 A op B ? C : A, by reversing the comparison first.
4561 Return a folded expression whose code is not a COND_EXPR
4562 anymore, or NULL_TREE if no folding opportunity is found. */
4565 fold_cond_expr_with_comparison (location_t loc, tree type,
4566 tree arg0, tree arg1, tree arg2)
4568 enum tree_code comp_code = TREE_CODE (arg0);
4569 tree arg00 = TREE_OPERAND (arg0, 0);
4570 tree arg01 = TREE_OPERAND (arg0, 1);
4571 tree arg1_type = TREE_TYPE (arg1);
4577 /* If we have A op 0 ? A : -A, consider applying the following
4578 transformations:
4580 A == 0? A : -A same as -A
4581 A != 0? A : -A same as A
4582 A >= 0? A : -A same as abs (A)
4583 A > 0? A : -A same as abs (A)
4584 A <= 0? A : -A same as -abs (A)
4585 A < 0? A : -A same as -abs (A)
4587 None of these transformations work for modes with signed
4588 zeros. If A is +/-0, the first two transformations will
4589 change the sign of the result (from +0 to -0, or vice
4590 versa). The last four will fix the sign of the result,
4591 even though the original expressions could be positive or
4592 negative, depending on the sign of A.
4594 Note that all these transformations are correct if A is
4595 NaN, since the two alternatives (A and -A) are also NaNs. */
4596 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4597 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4598 ? real_zerop (arg01)
4599 : integer_zerop (arg01))
4600 && ((TREE_CODE (arg2) == NEGATE_EXPR
4601 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4602 /* In the case that A is of the form X-Y, '-A' (arg2) may
4603 have already been folded to Y-X, check for that. */
4604 || (TREE_CODE (arg1) == MINUS_EXPR
4605 && TREE_CODE (arg2) == MINUS_EXPR
4606 && operand_equal_p (TREE_OPERAND (arg1, 0),
4607 TREE_OPERAND (arg2, 1), 0)
4608 && operand_equal_p (TREE_OPERAND (arg1, 1),
4609 TREE_OPERAND (arg2, 0), 0))))
4614 tem = fold_convert_loc (loc, arg1_type, arg1);
4615 return pedantic_non_lvalue_loc (loc,
4616 fold_convert_loc (loc, type,
4617 negate_expr (tem)));
4620 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4623 if (flag_trapping_math)
4628 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4629 arg1 = fold_convert_loc (loc, signed_type_for
4630 (TREE_TYPE (arg1)), arg1);
4631 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4632 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4635 if (flag_trapping_math)
4639 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4640 arg1 = fold_convert_loc (loc, signed_type_for
4641 (TREE_TYPE (arg1)), arg1);
4642 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4643 return negate_expr (fold_convert_loc (loc, type, tem));
4645 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4649 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4650 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4651 both transformations are correct when A is NaN: A != 0
4652 is then true, and A == 0 is false. */
4654 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4655 && integer_zerop (arg01) && integer_zerop (arg2))
4657 if (comp_code == NE_EXPR)
4658 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4659 else if (comp_code == EQ_EXPR)
4660 return build_int_cst (type, 0);
4663 /* Try some transformations of A op B ? A : B.
4665 A == B? A : B same as B
4666 A != B? A : B same as A
4667 A >= B? A : B same as max (A, B)
4668 A > B? A : B same as max (B, A)
4669 A <= B? A : B same as min (A, B)
4670 A < B? A : B same as min (B, A)
4672 As above, these transformations don't work in the presence
4673 of signed zeros. For example, if A and B are zeros of
4674 opposite sign, the first two transformations will change
4675 the sign of the result. In the last four, the original
4676 expressions give different results for (A=+0, B=-0) and
4677 (A=-0, B=+0), but the transformed expressions do not.
4679 The first two transformations are correct if either A or B
4680 is a NaN. In the first transformation, the condition will
4681 be false, and B will indeed be chosen. In the case of the
4682 second transformation, the condition A != B will be true,
4683 and A will be chosen.
4685 The conversions to max() and min() are not correct if B is
4686 a number and A is not. The conditions in the original
4687 expressions will be false, so all four give B. The min()
4688 and max() versions would give a NaN instead. */
4689 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4690 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4691 /* Avoid these transformations if the COND_EXPR may be used
4692 as an lvalue in the C++ front-end. PR c++/19199. */
4694 || (strcmp (lang_hooks.name, "GNU C++") != 0
4695 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4696 || ! maybe_lvalue_p (arg1)
4697 || ! maybe_lvalue_p (arg2)))
4699 tree comp_op0 = arg00;
4700 tree comp_op1 = arg01;
4701 tree comp_type = TREE_TYPE (comp_op0);
4703 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4704 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4714 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4716 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4721 /* In C++ a ?: expression can be an lvalue, so put the
4722 operand which will be used if they are equal first
4723 so that we can convert this back to the
4724 corresponding COND_EXPR. */
4725 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4727 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4728 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4729 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4730 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4731 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4732 comp_op1, comp_op0);
4733 return pedantic_non_lvalue_loc (loc,
4734 fold_convert_loc (loc, type, tem));
4741 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4743 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4744 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4745 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4746 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4747 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4748 comp_op1, comp_op0);
4749 return pedantic_non_lvalue_loc (loc,
4750 fold_convert_loc (loc, type, tem));
4754 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4755 return pedantic_non_lvalue_loc (loc,
4756 fold_convert_loc (loc, type, arg2));
4759 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4760 return pedantic_non_lvalue_loc (loc,
4761 fold_convert_loc (loc, type, arg1));
4764 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4769 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4770 we might still be able to simplify this. For example,
4771 if C1 is one less or one more than C2, this might have started
4772 out as a MIN or MAX and been transformed by this function.
4773 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4775 if (INTEGRAL_TYPE_P (type)
4776 && TREE_CODE (arg01) == INTEGER_CST
4777 && TREE_CODE (arg2) == INTEGER_CST)
4781 if (TREE_CODE (arg1) == INTEGER_CST)
4782 break;
4783 /* We can replace A with C1 in this case. */
4784 arg1 = fold_convert_loc (loc, type, arg01);
4785 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4788 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4789 MIN_EXPR, to preserve the signedness of the comparison. */
4790 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4792 && operand_equal_p (arg01,
4793 const_binop (PLUS_EXPR, arg2,
4794 build_int_cst (type, 1), 0),
4797 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4798 fold_convert_loc (loc, TREE_TYPE (arg00),
4800 return pedantic_non_lvalue_loc (loc,
4801 fold_convert_loc (loc, type, tem));
4806 /* If C1 is C2 - 1, this is min(A, C2), with the same care as above. */
4808 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4810 && operand_equal_p (arg01,
4811 const_binop (MINUS_EXPR, arg2,
4812 build_int_cst (type, 1), 0),
4815 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4816 fold_convert_loc (loc, TREE_TYPE (arg00),
4818 return pedantic_non_lvalue_loc (loc,
4819 fold_convert_loc (loc, type, tem));
4824 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4825 MAX_EXPR, to preserve the signedness of the comparison. */
4826 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4828 && operand_equal_p (arg01,
4829 const_binop (MINUS_EXPR, arg2,
4830 build_int_cst (type, 1), 0),
4833 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4834 fold_convert_loc (loc, TREE_TYPE (arg00),
4836 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4841 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4842 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4844 && operand_equal_p (arg01,
4845 const_binop (PLUS_EXPR, arg2,
4846 build_int_cst (type, 1), 0),
4849 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4850 fold_convert_loc (loc, TREE_TYPE (arg00),
4852 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
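/* [Editorial sketch -- not part of fold-const.c: source-level forms of
   the COND_EXPR transformations documented above, on integers, where
   the signed-zero and NaN caveats do not apply.  Helper names are
   illustrative only.] */
static int
abs_sketch (int a)
{
  return a >= 0 ? a : -a;	/* A >= 0 ? A : -A  ->  ABS_EXPR <A> */
}

static int
min_sketch (int a, int b)
{
  return a <= b ? a : b;	/* A <= B ? A : B  ->  MIN_EXPR <A, B> */
}

static int
min_const_sketch (int a)
{
  return a < 6 ? a : 5;		/* C1 == C2 + 1: A < 6 ? A : 5 is min (A, 5) */
}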
4866 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4867 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4868 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4869 false) >= 2)
4870 #endif
4872 /* EXP is some logical combination of boolean tests. See if we can
4873 merge it into some range test. Return the new tree if so. */
4876 fold_range_test (location_t loc, enum tree_code code, tree type,
4877 tree op0, tree op1)
4879 int or_op = (code == TRUTH_ORIF_EXPR
4880 || code == TRUTH_OR_EXPR);
4881 int in0_p, in1_p, in_p;
4882 tree low0, low1, low, high0, high1, high;
4883 bool strict_overflow_p = false;
4884 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4885 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4887 const char * const warnmsg = G_("assuming signed overflow does not occur "
4888 "when simplifying range test");
4890 /* If this is an OR operation, invert both sides; we will invert
4891 again at the end. */
4893 in0_p = ! in0_p, in1_p = ! in1_p;
4895 /* If both expressions are the same, if we can merge the ranges, and we
4896 can build the range test, return it or it inverted. If one of the
4897 ranges is always true or always false, consider it to be the same
4898 expression as the other. */
4899 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4900 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4902 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
4903 lhs != 0 ? lhs
4904 : rhs != 0 ? rhs : integer_zero_node,
4905 in_p, low, high))))
4907 if (strict_overflow_p)
4908 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4909 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4912 /* On machines where branches are expensive, if this is a
4913 short-circuited branch and the underlying object on both sides
4914 is the same, make a non-short-circuit operation. */
4915 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4916 && lhs != 0 && rhs != 0
4917 && (code == TRUTH_ANDIF_EXPR
4918 || code == TRUTH_ORIF_EXPR)
4919 && operand_equal_p (lhs, rhs, 0))
4921 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4922 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4923 which case we can't do this. */
4924 if (simple_operand_p (lhs))
4926 tem = build2 (code == TRUTH_ANDIF_EXPR
4927 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4928 type, op0, op1);
4929 SET_EXPR_LOCATION (tem, loc);
4930 return tem;
4933 else if (lang_hooks.decls.global_bindings_p () == 0
4934 && ! CONTAINS_PLACEHOLDER_P (lhs))
4936 tree common = save_expr (lhs);
4938 if (0 != (lhs = build_range_check (loc, type, common,
4939 or_op ? ! in0_p : in0_p,
4941 && (0 != (rhs = build_range_check (loc, type, common,
4942 or_op ? ! in1_p : in1_p,
4945 if (strict_overflow_p)
4946 fold_overflow_warning (warnmsg,
4947 WARN_STRICT_OVERFLOW_COMPARISON);
4948 tem = build2 (code == TRUTH_ANDIF_EXPR
4949 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4950 type, lhs, rhs);
4951 SET_EXPR_LOCATION (tem, loc);
4952 return tem;
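/* [Editorial sketch -- not part of fold-const.c.  When
   LOGICAL_OP_NON_SHORT_CIRCUIT holds and both operands are simple and
   side-effect free, the short-circuit form may be rewritten without a
   branch, as if written (x > 0) & (y > 0).  Helper name is illustrative
   only.] */
static int
non_short_circuit_sketch (int x, int y)
{
  return x > 0 && y > 0;	/* may fold TRUTH_ANDIF to TRUTH_AND */
}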
4960 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4961 bit value. Arrange things so the extra bits will be set to zero if and
4962 only if C is sign-extended to its full width. If MASK is nonzero,
4963 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4966 unextend (tree c, int p, int unsignedp, tree mask)
4968 tree type = TREE_TYPE (c);
4969 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4972 if (p == modesize || unsignedp)
4973 return c;
4975 /* We work by getting just the sign bit into the low-order bit, then
4976 into the high-order bit, then sign-extend. We then XOR that value
4977 with C. */
4978 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4979 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4981 /* We must use a signed type in order to get an arithmetic right shift.
4982 However, we must also avoid introducing accidental overflows, so that
4983 a subsequent call to integer_zerop will work. Hence we must
4984 do the type conversion here. At this point, the constant is either
4985 zero or one, and the conversion to a signed type can never overflow.
4986 We could get an overflow if this conversion is done anywhere else. */
4987 if (TYPE_UNSIGNED (type))
4988 temp = fold_convert (signed_type_for (type), temp);
4990 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4991 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4992 if (mask != 0)
4993 temp = const_binop (BIT_AND_EXPR, temp,
4994 fold_convert (TREE_TYPE (c), mask),
4995 0);
4996 /* If necessary, convert the type back to match the type of C. */
4997 if (TYPE_UNSIGNED (type))
4998 temp = fold_convert (type, temp);
5000 return fold_convert (type,
5001 const_binop (BIT_XOR_EXPR, c, temp, 0));
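/* [Editorial sketch -- not part of fold-const.c.  A related classic
   identity, shown on ordinary integers: with M the sign bit of a P-bit
   value, (X ^ M) - M sign-extends X from P bits.  unextend above does a
   shift/XOR manipulation in the same spirit on tree constants; this
   standalone helper is only an illustration and assumes 1 <= P < 32.] */
static int
sign_extend_sketch (unsigned int x, int p)
{
  unsigned int m = 1u << (p - 1);	/* sign bit of the P-bit field */
  return (int) ((x ^ m) - m);		/* high bits become sign copies */
}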
5004 /* For an expression that has the form
5005 (A && B) || !B
5006 or
5007 (A || B) && !B,
5008 we can drop one of the inner expressions and simplify to
5009 A || !B
5010 or
5011 A && !B
5012 LOC is the location of the resulting expression. OP is the inner
5013 logical operation; the left-hand side in the examples above, while CMPOP
5014 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5015 removing a condition that guards another, as in
5016 (A != NULL && A->...) || A == NULL
5017 which we must not transform. If RHS_ONLY is true, only eliminate the
5018 right-most operand of the inner logical operation. */
5021 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5024 tree type = TREE_TYPE (cmpop);
5025 enum tree_code code = TREE_CODE (cmpop);
5026 enum tree_code truthop_code = TREE_CODE (op);
5027 tree lhs = TREE_OPERAND (op, 0);
5028 tree rhs = TREE_OPERAND (op, 1);
5029 tree orig_lhs = lhs, orig_rhs = rhs;
5030 enum tree_code rhs_code = TREE_CODE (rhs);
5031 enum tree_code lhs_code = TREE_CODE (lhs);
5032 enum tree_code inv_code;
5034 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5037 if (TREE_CODE_CLASS (code) != tcc_comparison)
5040 if (rhs_code == truthop_code)
5042 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5043 if (newrhs != NULL_TREE)
5046 rhs_code = TREE_CODE (rhs);
5049 if (lhs_code == truthop_code && !rhs_only)
5051 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5052 if (newlhs != NULL_TREE)
5055 lhs_code = TREE_CODE (lhs);
5059 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5060 if (inv_code == rhs_code
5061 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5062 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5064 if (!rhs_only && inv_code == lhs_code
5065 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5066 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5068 if (rhs != orig_rhs || lhs != orig_lhs)
5069 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5070 lhs, rhs);
5071 return NULL_TREE;
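/* [Editorial sketch -- not part of fold-const.c: the simplification
   above in source form.  The inner b is redundant because the other arm
   already covers !b.] */
static int
drop_opposite_arm_sketch (int a, int b)
{
  return (b && a) || !b;	/* simplifies to a || !b */
}
/* [The RHS_ONLY guard exists for cases like
   (p != 0 && p->field) || p == 0: dropping the left-most p != 0 would
   let p->field be evaluated for a null p.] */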
5074 /* Find ways of folding logical expressions of LHS and RHS:
5075 Try to merge two comparisons to the same innermost item.
5076 Look for range tests like "ch >= '0' && ch <= '9'".
5077 Look for combinations of simple terms on machines with expensive branches
5078 and evaluate the RHS unconditionally.
5080 For example, if we have p->a == 2 && p->b == 4 and we can make an
5081 object large enough to span both A and B, we can do this with a comparison
5082 against the object ANDed with a mask.
5084 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5085 operations to do this with one comparison.
5087 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5088 function and the one above.
5090 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5091 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5093 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5094 two operands.
5096 We return the simplified tree or 0 if no optimization is possible. */
5099 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5100 tree lhs, tree rhs)
5102 /* If this is the "or" of two comparisons, we can do something if
5103 the comparisons are NE_EXPR. If this is the "and", we can do something
5104 if the comparisons are EQ_EXPR. I.e.,
5105 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5107 WANTED_CODE is this operation code. For single bit fields, we can
5108 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5109 comparison for one-bit fields. */
5111 enum tree_code wanted_code;
5112 enum tree_code lcode, rcode;
5113 tree ll_arg, lr_arg, rl_arg, rr_arg;
5114 tree ll_inner, lr_inner, rl_inner, rr_inner;
5115 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5116 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5117 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5118 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5119 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5120 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5121 enum machine_mode lnmode, rnmode;
5122 tree ll_mask, lr_mask, rl_mask, rr_mask;
5123 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5124 tree l_const, r_const;
5125 tree lntype, rntype, result;
5126 HOST_WIDE_INT first_bit, end_bit;
5127 int volatilep;
5128 tree orig_lhs = lhs, orig_rhs = rhs;
5129 enum tree_code orig_code = code;
5131 /* Start by getting the comparison codes. Fail if anything is volatile.
5132 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5133 it were surrounded with a NE_EXPR. */
5135 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5138 lcode = TREE_CODE (lhs);
5139 rcode = TREE_CODE (rhs);
5141 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5143 lhs = build2 (NE_EXPR, truth_type, lhs,
5144 build_int_cst (TREE_TYPE (lhs), 0));
5145 lcode = NE_EXPR;
5148 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5150 rhs = build2 (NE_EXPR, truth_type, rhs,
5151 build_int_cst (TREE_TYPE (rhs), 0));
5152 rcode = NE_EXPR;
5155 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5156 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5159 ll_arg = TREE_OPERAND (lhs, 0);
5160 lr_arg = TREE_OPERAND (lhs, 1);
5161 rl_arg = TREE_OPERAND (rhs, 0);
5162 rr_arg = TREE_OPERAND (rhs, 1);
5164 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5165 if (simple_operand_p (ll_arg)
5166 && simple_operand_p (lr_arg))
5169 if (operand_equal_p (ll_arg, rl_arg, 0)
5170 && operand_equal_p (lr_arg, rr_arg, 0))
5172 result = combine_comparisons (loc, code, lcode, rcode,
5173 truth_type, ll_arg, lr_arg);
5174 if (result)
5175 return result;
5177 else if (operand_equal_p (ll_arg, rr_arg, 0)
5178 && operand_equal_p (lr_arg, rl_arg, 0))
5180 result = combine_comparisons (loc, code, lcode,
5181 swap_tree_comparison (rcode),
5182 truth_type, ll_arg, lr_arg);
5183 if (result)
5184 return result;
5188 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5189 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5191 /* If the RHS can be evaluated unconditionally and its operands are
5192 simple, it wins to evaluate the RHS unconditionally on machines
5193 with expensive branches. In this case, this isn't a comparison
5194 that can be merged. Avoid doing this if the RHS is a floating-point
5195 comparison since those can trap. */
5197 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5199 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5200 && simple_operand_p (rl_arg)
5201 && simple_operand_p (rr_arg))
5203 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5204 if (code == TRUTH_OR_EXPR
5205 && lcode == NE_EXPR && integer_zerop (lr_arg)
5206 && rcode == NE_EXPR && integer_zerop (rr_arg)
5207 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5208 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5210 result = build2 (NE_EXPR, truth_type,
5211 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5213 build_int_cst (TREE_TYPE (ll_arg), 0));
5214 goto fold_truthop_exit;
5217 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5218 if (code == TRUTH_AND_EXPR
5219 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5220 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5221 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5222 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5224 result = build2 (EQ_EXPR, truth_type,
5225 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5227 build_int_cst (TREE_TYPE (ll_arg), 0));
5228 goto fold_truthop_exit;
5231 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5233 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5235 result = build2 (code, truth_type, lhs, rhs);
5236 goto fold_truthop_exit;
5242 /* See if the comparisons can be merged. Then get all the parameters for
5243 each side. */
5245 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5246 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5247 return 0;
5249 volatilep = 0;
5250 ll_inner = decode_field_reference (loc, ll_arg,
5251 &ll_bitsize, &ll_bitpos, &ll_mode,
5252 &ll_unsignedp, &volatilep, &ll_mask,
5254 lr_inner = decode_field_reference (loc, lr_arg,
5255 &lr_bitsize, &lr_bitpos, &lr_mode,
5256 &lr_unsignedp, &volatilep, &lr_mask,
5258 rl_inner = decode_field_reference (loc, rl_arg,
5259 &rl_bitsize, &rl_bitpos, &rl_mode,
5260 &rl_unsignedp, &volatilep, &rl_mask,
5262 rr_inner = decode_field_reference (loc, rr_arg,
5263 &rr_bitsize, &rr_bitpos, &rr_mode,
5264 &rr_unsignedp, &volatilep, &rr_mask,
5267 /* The inner operation on the lhs of each comparison must be the same
5268 if we are to be able to do anything. Then see if we have constants.
5269 If not, the same must be true for the rhs's. */
5271 if (volatilep || ll_inner == 0 || rl_inner == 0
5272 || ! operand_equal_p (ll_inner, rl_inner, 0))
5273 return 0;
5275 if (TREE_CODE (lr_arg) == INTEGER_CST
5276 && TREE_CODE (rr_arg) == INTEGER_CST)
5277 l_const = lr_arg, r_const = rr_arg;
5278 else if (lr_inner == 0 || rr_inner == 0
5279 || ! operand_equal_p (lr_inner, rr_inner, 0))
5280 return 0;
5281 else
5282 l_const = r_const = 0;
5284 /* If either comparison code is not correct for our logical operation,
5285 fail. However, we can convert a one-bit comparison against zero into
5286 the opposite comparison against that bit being set in the field. */
5288 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5289 if (lcode != wanted_code)
5291 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5293 /* Make the left operand unsigned, since we are only interested
5294 in the value of one bit. Otherwise we are doing the wrong
5295 thing below. */
5303 /* This is analogous to the code for l_const above. */
5304 if (rcode != wanted_code)
5306 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5315 /* See if we can find a mode that contains both fields being compared on
5316 the left. If we can't, fail. Otherwise, update all constants and masks
5317 to be relative to a field of that size. */
5318 first_bit = MIN (ll_bitpos, rl_bitpos);
5319 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5320 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5321 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5323 if (lnmode == VOIDmode)
5326 lnbitsize = GET_MODE_BITSIZE (lnmode);
5327 lnbitpos = first_bit & ~ (lnbitsize - 1);
5328 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5329 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5331 if (BYTES_BIG_ENDIAN)
5333 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5334 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5337 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5338 size_int (xll_bitpos), 0);
5339 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5340 size_int (xrl_bitpos), 0);
5344 l_const = fold_convert_loc (loc, lntype, l_const);
5345 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5346 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5347 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5348 fold_build1_loc (loc, BIT_NOT_EXPR,
5352 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5354 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5359 r_const = fold_convert_loc (loc, lntype, r_const);
5360 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5361 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5362 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5363 fold_build1_loc (loc, BIT_NOT_EXPR,
5367 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5369 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5373 /* If the right sides are not constant, do the same for it. Also,
5374 disallow this optimization if a size or signedness mismatch occurs
5375 between the left and right sides. */
5378 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5379 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5380 /* Make sure the two fields on the right
5381 correspond to the left without being swapped. */
5382 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5385 first_bit = MIN (lr_bitpos, rr_bitpos);
5386 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5387 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5388 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5390 if (rnmode == VOIDmode)
5393 rnbitsize = GET_MODE_BITSIZE (rnmode);
5394 rnbitpos = first_bit & ~ (rnbitsize - 1);
5395 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5396 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5398 if (BYTES_BIG_ENDIAN)
5400 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5401 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5404 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5406 size_int (xlr_bitpos), 0);
5407 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5409 size_int (xrr_bitpos), 0);
5411 /* Make a mask that corresponds to both fields being compared.
5412 Do this for both items being compared. If the operands are the
5413 same size and the bits being compared are in the same position
5414 then we can do this by masking both and comparing the masked
5415 results. */
5416 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5417 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5418 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5420 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5421 ll_unsignedp || rl_unsignedp);
5422 if (! all_ones_mask_p (ll_mask, lnbitsize))
5423 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5425 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5426 lr_unsignedp || rr_unsignedp);
5427 if (! all_ones_mask_p (lr_mask, rnbitsize))
5428 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5430 result = build2 (wanted_code, truth_type, lhs, rhs);
5431 goto fold_truthop_exit;
5434 /* There is still another way we can do something: If both pairs of
5435 fields being compared are adjacent, we may be able to make a wider
5436 field containing them both.
5438 Note that we still must mask the lhs/rhs expressions. Furthermore,
5439 the mask must be shifted to account for the shift done by
5440 make_bit_field_ref. */
5441 if ((ll_bitsize + ll_bitpos == rl_bitpos
5442 && lr_bitsize + lr_bitpos == rr_bitpos)
5443 || (ll_bitpos == rl_bitpos + rl_bitsize
5444 && lr_bitpos == rr_bitpos + rr_bitsize))
5448 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5449 ll_bitsize + rl_bitsize,
5450 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5451 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5452 lr_bitsize + rr_bitsize,
5453 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5455 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5456 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5457 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5458 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5460 /* Convert to the smaller type before masking out unwanted bits. */
5462 if (lntype != rntype)
5464 if (lnbitsize > rnbitsize)
5466 lhs = fold_convert_loc (loc, rntype, lhs);
5467 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5470 else if (lnbitsize < rnbitsize)
5472 rhs = fold_convert_loc (loc, lntype, rhs);
5473 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5478 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5479 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5481 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5482 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5484 result = build2 (wanted_code, truth_type, lhs, rhs);
5485 goto fold_truthop_exit;
5491 /* Handle the case of comparisons with constants. If there is something in
5492 common between the masks, those bits of the constants must be the same.
5493 If not, the condition is always false. Test for this to avoid generating
5494 incorrect code below. */
5495 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5496 if (! integer_zerop (result)
5497 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5498 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5500 if (wanted_code == NE_EXPR)
5502 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5503 return constant_boolean_node (true, truth_type);
5507 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5508 return constant_boolean_node (false, truth_type);
5512 /* Construct the expression we will return. First get the component
5513 reference we will make. Unless the mask is all ones the width of
5514 that field, perform the mask operation. Then compare with the
5515 merged constant. */
5516 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5517 ll_unsignedp || rl_unsignedp);
5519 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5520 if (! all_ones_mask_p (ll_mask, lnbitsize))
5522 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5523 SET_EXPR_LOCATION (result, loc);
5526 result = build2 (wanted_code, truth_type, result,
5527 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5529 fold_truthop_exit:
5530 SET_EXPR_LOCATION (result, loc);
5531 return result;
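/* [Editorial sketch -- not part of fold-const.c.  A hypothetical struct
   showing what the merging above achieves at the source level; the
   actual access width, mask and combined constant are target- and
   endian-dependent.] */
struct two_fields_sketch
{
  unsigned char a;
  unsigned char b;
};

static int
both_fields_equal_sketch (const struct two_fields_sketch *p)
{
  /* May become a single 16-bit masked load compared against one
     combined constant, instead of two loads and two branches.  */
  return p->a == 2 && p->b == 4;
}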
5534 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5535 constant. */
5538 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5539 tree op0, tree op1)
5542 enum tree_code op_code;
5545 int consts_equal, consts_lt;
5548 STRIP_SIGN_NOPS (arg0);
5550 op_code = TREE_CODE (arg0);
5551 minmax_const = TREE_OPERAND (arg0, 1);
5552 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5553 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5554 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5555 inner = TREE_OPERAND (arg0, 0);
5557 /* If something does not permit us to optimize, return the original tree. */
5558 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5559 || TREE_CODE (comp_const) != INTEGER_CST
5560 || TREE_OVERFLOW (comp_const)
5561 || TREE_CODE (minmax_const) != INTEGER_CST
5562 || TREE_OVERFLOW (minmax_const))
5565 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5566 and GT_EXPR, doing the rest with recursive calls using logical
5567 simplifications. */
5570 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5572 tree tem
5573 = optimize_minmax_comparison (loc,
5574 invert_tree_comparison (code, false),
5575 type, op0, op1);
5576 if (tem)
5577 return invert_truthvalue_loc (loc, tem);
5583 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5584 optimize_minmax_comparison
5585 (loc, EQ_EXPR, type, arg0, comp_const),
5586 optimize_minmax_comparison
5587 (loc, GT_EXPR, type, arg0, comp_const));
5590 if (op_code == MAX_EXPR && consts_equal)
5591 /* MAX (X, 0) == 0 -> X <= 0 */
5592 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5594 else if (op_code == MAX_EXPR && consts_lt)
5595 /* MAX (X, 0) == 5 -> X == 5 */
5596 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5598 else if (op_code == MAX_EXPR)
5599 /* MAX (X, 0) == -1 -> false */
5600 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5602 else if (consts_equal)
5603 /* MIN (X, 0) == 0 -> X >= 0 */
5604 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5607 /* MIN (X, 0) == 5 -> false */
5608 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5611 /* MIN (X, 0) == -1 -> X == -1 */
5612 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5615 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5616 /* MAX (X, 0) > 0 -> X > 0
5617 MAX (X, 0) > 5 -> X > 5 */
5618 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5620 else if (op_code == MAX_EXPR)
5621 /* MAX (X, 0) > -1 -> true */
5622 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5624 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5625 /* MIN (X, 0) > 0 -> false
5626 MIN (X, 0) > 5 -> false */
5627 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5630 /* MIN (X, 0) > -1 -> X > -1 */
5631 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
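/* [Editorial sketch -- not part of fold-const.c: two of the MIN/MAX
   comparison rules above, spelled out on ints.  Helper names are
   illustrative only.] */
static int
max_eq_sketch (int x)
{
  int m = x > 0 ? x : 0;	/* MAX (X, 0) */
  return m == 0;		/* folds to x <= 0 */
}

static int
min_gt_sketch (int x)
{
  int m = x < 0 ? x : 0;	/* MIN (X, 0) */
  return m > -1;		/* folds to x > -1 */
}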
5638 /* T is an integer expression that is being multiplied, divided, or taken a
5639 modulus (CODE says which and what kind of divide or modulus) by a
5640 constant C. See if we can eliminate that operation by folding it with
5641 other operations already in T. WIDE_TYPE, if non-null, is a type that
5642 should be used for the computation if wider than our type.
5644 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5645 (X * 2) + (Y * 4). We must, however, be assured that either the original
5646 expression would not overflow or that overflow is undefined for the type
5647 in the language in question.
5649 If we return a non-null expression, it is an equivalent form of the
5650 original computation, but need not be in the original type.
5652 We set *STRICT_OVERFLOW_P to true if the return value depends on
5653 signed overflow being undefined. Otherwise we do not change
5654 *STRICT_OVERFLOW_P. */
5657 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5658 bool *strict_overflow_p)
5660 /* To avoid exponential search depth, refuse to allow recursion past
5661 three levels. Beyond that (1) it's highly unlikely that we'll find
5662 something interesting and (2) we've probably processed it before
5663 when we built the inner expression. */
5672 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5679 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5680 bool *strict_overflow_p)
5682 tree type = TREE_TYPE (t);
5683 enum tree_code tcode = TREE_CODE (t);
5684 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5685 > GET_MODE_SIZE (TYPE_MODE (type)))
5686 ? wide_type : type);
5688 int same_p = tcode == code;
5689 tree op0 = NULL_TREE, op1 = NULL_TREE;
5690 bool sub_strict_overflow_p;
5692 /* Don't deal with constants of zero here; they confuse the code below. */
5693 if (integer_zerop (c))
5694 return NULL_TREE;
5696 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5697 op0 = TREE_OPERAND (t, 0);
5699 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5700 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5702 /* Note that we need not handle conditional operations here since fold
5703 already handles those cases. So just do arithmetic here. */
5707 /* For a constant, we can always simplify if the operation is a multiply
5708 or (for divide and modulus) if it is a multiple of our constant. */
5709 if (code == MULT_EXPR
5710 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5711 return const_binop (code, fold_convert (ctype, t),
5712 fold_convert (ctype, c), 0);
5715 CASE_CONVERT: case NON_LVALUE_EXPR:
5716 /* If op0 is an expression ... */
5717 if ((COMPARISON_CLASS_P (op0)
5718 || UNARY_CLASS_P (op0)
5719 || BINARY_CLASS_P (op0)
5720 || VL_EXP_CLASS_P (op0)
5721 || EXPRESSION_CLASS_P (op0))
5722 /* ... and has wrapping overflow, and its type is smaller
5723 than ctype, then we cannot pass through as widening. */
5724 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5725 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5726 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5727 && (TYPE_PRECISION (ctype)
5728 > TYPE_PRECISION (TREE_TYPE (op0))))
5729 /* ... or this is a truncation (t is narrower than op0),
5730 then we cannot pass through this narrowing. */
5731 || (TYPE_PRECISION (type)
5732 < TYPE_PRECISION (TREE_TYPE (op0)))
5733 /* ... or signedness changes for division or modulus,
5734 then we cannot pass through this conversion. */
5735 || (code != MULT_EXPR
5736 && (TYPE_UNSIGNED (ctype)
5737 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5738 /* ... or has undefined overflow while the converted to
5739 type has not, we cannot do the operation in the inner type
5740 as that would introduce undefined overflow. */
5741 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5742 && !TYPE_OVERFLOW_UNDEFINED (type))))
5743 break;
5745 /* Pass the constant down and see if we can make a simplification. If
5746 we can, replace this expression with the inner simplification for
5747 possible later conversion to our or some other type. */
5748 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5749 && TREE_CODE (t2) == INTEGER_CST
5750 && !TREE_OVERFLOW (t2)
5751 && (0 != (t1 = extract_muldiv (op0, t2, code,
5753 ? ctype : NULL_TREE,
5754 strict_overflow_p))))
5759 /* If widening the type changes it from signed to unsigned, then we
5760 must avoid building ABS_EXPR itself as unsigned. */
5761 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5763 tree cstype = (*signed_type_for) (ctype);
5764 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5765 != 0)
5767 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5768 return fold_convert (ctype, t1);
5772 /* If the constant is negative, we cannot simplify this. */
5773 if (tree_int_cst_sgn (c) == -1)
5777 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5778 != 0)
5779 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5782 case MIN_EXPR: case MAX_EXPR:
5783 /* If widening the type changes the signedness, then we can't perform
5784 this optimization as that changes the result. */
5785 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5788 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5789 sub_strict_overflow_p = false;
5790 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5791 &sub_strict_overflow_p)) != 0
5792 && (t2 = extract_muldiv (op1, c, code, wide_type,
5793 &sub_strict_overflow_p)) != 0)
5795 if (tree_int_cst_sgn (c) < 0)
5796 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5797 if (sub_strict_overflow_p)
5798 *strict_overflow_p = true;
5799 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5800 fold_convert (ctype, t2));
5804 case LSHIFT_EXPR: case RSHIFT_EXPR:
5805 /* If the second operand is constant, this is a multiplication
5806 or floor division by a power of two, so we can treat it that
5807 way unless the multiplier or divisor overflows. Signed
5808 left-shift overflow is implementation-defined rather than
5809 undefined in C90, so do not convert signed left shift into
5810 multiplication. */
5811 if (TREE_CODE (op1) == INTEGER_CST
5812 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5813 /* const_binop may not detect overflow correctly,
5814 so check for it explicitly here. */
5815 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5816 && TREE_INT_CST_HIGH (op1) == 0
5817 && 0 != (t1 = fold_convert (ctype,
5818 const_binop (LSHIFT_EXPR,
5821 && !TREE_OVERFLOW (t1))
5822 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5823 ? MULT_EXPR : FLOOR_DIV_EXPR,
5825 fold_convert (ctype, op0),
5827 c, code, wide_type, strict_overflow_p);
5830 case PLUS_EXPR: case MINUS_EXPR:
5831 /* See if we can eliminate the operation on both sides. If we can, we
5832 can return a new PLUS or MINUS. If we can't, the only remaining
5833 cases where we can do anything are if the second operand is a
5835 sub_strict_overflow_p = false;
5836 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5837 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5838 if (t1 != 0 && t2 != 0
5839 && (code == MULT_EXPR
5840 /* If not multiplication, we can only do this if both operands
5841 are divisible by c. */
5842 || (multiple_of_p (ctype, op0, c)
5843 && multiple_of_p (ctype, op1, c))))
5845 if (sub_strict_overflow_p)
5846 *strict_overflow_p = true;
5847 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5848 fold_convert (ctype, t2));
5851 /* If this was a subtraction, negate OP1 and set it to be an addition.
5852 This simplifies the logic below. */
5853 if (tcode == MINUS_EXPR)
5855 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5856 /* If OP1 was not easily negatable, the constant may be OP0. */
5857 if (TREE_CODE (op0) == INTEGER_CST)
5868 if (TREE_CODE (op1) != INTEGER_CST)
5869 break;
5871 /* If either OP1 or C are negative, this optimization is not safe for
5872 some of the division and remainder types while for others we need
5873 to change the code. */
5874 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5876 if (code == CEIL_DIV_EXPR)
5877 code = FLOOR_DIV_EXPR;
5878 else if (code == FLOOR_DIV_EXPR)
5879 code = CEIL_DIV_EXPR;
5880 else if (code != MULT_EXPR
5881 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5885 /* If it's a multiply or a division/modulus operation of a multiple
5886 of our constant, do the operation and verify it doesn't overflow. */
5887 if (code == MULT_EXPR
5888 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5890 op1 = const_binop (code, fold_convert (ctype, op1),
5891 fold_convert (ctype, c), 0);
5892 /* We allow the constant to overflow with wrapping semantics. */
5893 if (op1 == 0
5894 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5895 break;
5900 /* If we have an unsigned type that is not a sizetype, we cannot widen
5901 the operation since it will change the result if the original
5902 computation overflowed. */
5903 if (TYPE_UNSIGNED (ctype)
5904 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5908 /* If we were able to eliminate our operation from the first side,
5909 apply our operation to the second side and reform the PLUS. */
5910 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5911 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5913 /* The last case is when the operation is a multiply. In that case, we can
5914 apply the distributive law to commute the multiply and addition
5915 if the multiplication of the constants doesn't overflow. */
5916 if (code == MULT_EXPR)
5917 return fold_build2 (tcode, ctype,
5918 fold_build2 (code, ctype,
5919 fold_convert (ctype, op0),
5920 fold_convert (ctype, c)),
5926 /* We have a special case here if we are doing something like
5927 (C * 8) % 4 since we know that's zero. */
5928 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5929 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5930 /* If the multiplication can overflow we cannot optimize this.
5931 ??? Until we can properly mark individual operations as
5932 not overflowing we need to treat sizetype specially here, as
5933 stor-layout relies on this optimization to make
5934 DECL_FIELD_BIT_OFFSET always a constant. */
5935 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5936 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5937 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5938 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5939 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5941 *strict_overflow_p = true;
5942 return omit_one_operand (type, integer_zero_node, op0);
5945 /* ... fall through ... */
5947 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5948 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5949 /* If we can extract our operation from the LHS, do so and return a
5950 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5951 do something only if the second operand is a constant. */
5952 if (same_p
5953 && (t1 = extract_muldiv (op0, c, code, wide_type,
5954 strict_overflow_p)) != 0)
5955 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5956 fold_convert (ctype, op1));
5957 else if (tcode == MULT_EXPR && code == MULT_EXPR
5958 && (t1 = extract_muldiv (op1, c, code, wide_type,
5959 strict_overflow_p)) != 0)
5960 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5961 fold_convert (ctype, t1));
5962 else if (TREE_CODE (op1) != INTEGER_CST)
5965 /* If these are the same operation types, we can associate them
5966 assuming no overflow. */
5967 if (tcode == code
5968 && 0 != (t1 = int_const_binop (MULT_EXPR,
5969 fold_convert (ctype, op1),
5970 fold_convert (ctype, c), 1))
5971 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
5972 TREE_INT_CST_HIGH (t1),
5973 (TYPE_UNSIGNED (ctype)
5974 && tcode != MULT_EXPR) ? -1 : 1,
5975 TREE_OVERFLOW (t1)))
5976 && !TREE_OVERFLOW (t1))
5977 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5979 /* If these operations "cancel" each other, we have the main
5980 optimizations of this pass, which occur when either constant is a
5981 multiple of the other, in which case we replace this with an
5982 operation of either CODE or TCODE.
5984 If we have an unsigned type that is not a sizetype, we cannot do
5985 this since it will change the result if the original computation
5986 overflowed. */
5987 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5988 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5989 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5990 || (tcode == MULT_EXPR
5991 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5992 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5993 && code != MULT_EXPR)))
5995 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5997 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5998 *strict_overflow_p = true;
5999 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6000 fold_convert (ctype,
6001 const_binop (TRUNC_DIV_EXPR,
6002 op1, c, 0)));
6004 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6006 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6007 *strict_overflow_p = true;
6008 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6009 fold_convert (ctype,
6010 const_binop (TRUNC_DIV_EXPR,
6011 c, op1, 0)));
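/* [Editorial sketch -- not part of fold-const.c: the header comment's
   own example in source form.  The rewrite is only valid under the
   overflow conditions discussed above (e.g. undefined signed overflow),
   and the helper name is illustrative only.] */
static long
extract_muldiv_sketch (long x, long y)
{
  /* May be rewritten as x * 2 + y * 4, eliminating the division.  */
  return (x * 8 + y * 16) / 4;
}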
6023 /* Return a node which has the indicated constant VALUE (either 0 or
6024 1), and is of the indicated TYPE. */
6027 constant_boolean_node (int value, tree type)
6029 if (type == integer_type_node)
6030 return value ? integer_one_node : integer_zero_node;
6031 else if (type == boolean_type_node)
6032 return value ? boolean_true_node : boolean_false_node;
6034 return build_int_cst (type, value);
6038 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6039 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6040 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6041 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6042 COND is the first argument to CODE; otherwise (as in the example
6043 given here), it is the second argument. TYPE is the type of the
6044 original expression. Return NULL_TREE if no simplification is
6045 possible. */
6048 fold_binary_op_with_conditional_arg (location_t loc,
6049 enum tree_code code,
6050 tree type, tree op0, tree op1,
6051 tree cond, tree arg, int cond_first_p)
6053 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6054 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6055 tree test, true_value, false_value;
6056 tree lhs = NULL_TREE;
6057 tree rhs = NULL_TREE;
6059 if (TREE_CODE (cond) == COND_EXPR)
6061 test = TREE_OPERAND (cond, 0);
6062 true_value = TREE_OPERAND (cond, 1);
6063 false_value = TREE_OPERAND (cond, 2);
6064 /* If this operand throws an exception, then it does not make
6065 sense to try to perform a logical or arithmetic operation
6066 involving it. */
6067 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6068 lhs = true_value;
6069 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6070 rhs = false_value;
6074 tree testtype = TREE_TYPE (cond);
6076 true_value = constant_boolean_node (true, testtype);
6077 false_value = constant_boolean_node (false, testtype);
6080 /* This transformation is only worthwhile if we don't have to wrap ARG
6081 in a SAVE_EXPR and the operation can be simplified on at least one
6082 of the branches once it is pushed inside the COND_EXPR. */
6083 if (!TREE_CONSTANT (arg)
6084 && (TREE_SIDE_EFFECTS (arg)
6085 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6088 arg = fold_convert_loc (loc, arg_type, arg);
6091 true_value = fold_convert_loc (loc, cond_type, true_value);
6093 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6095 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6099 false_value = fold_convert_loc (loc, cond_type, false_value);
6101 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6103 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6106 /* Check that we have simplified at least one of the branches. */
6107 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6110 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
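/* [Editorial sketch -- not part of fold-const.c: the transformation
   documented above in source form.  Helper name is illustrative only.] */
static int
cond_arg_sketch (int a, int b)
{
  /* a + (b ? 1 : 0) folds to b ? (a + 1) : (a + 0); per the check
     above, the fold is done only when at least one branch simplifies,
     as both do here since 1 and 0 are constants.  */
  return a + (b ? 1 : 0);
}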
6114 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6116 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6117 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6118 ADDEND is the same as X.
6120 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6121 and finite. The problematic cases are when X is zero, and its mode
6122 has signed zeros. In the case of rounding towards -infinity,
6123 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6124 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6127 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6129 if (!real_zerop (addend))
6132 /* Don't allow the fold with -fsignaling-nans. */
6133 if (HONOR_SNANS (TYPE_MODE (type)))
6136 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6137 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6140 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6141 if (TREE_CODE (addend) == REAL_CST
6142 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6143 negate = !negate;
6145 /* The mode has signed zeros, and we have to honor their sign.
6146 In this situation, there is only one case we can return true for.
6147 X - 0 is the same as X unless rounding towards -infinity is
6148 in effect. */
6149 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
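/* [Editorial sketch -- not part of fold-const.c.  Why the fold is
   refused when signed zeros are honored: in IEEE round-to-nearest,
   (-0.0) + 0.0 is +0.0, so folding x + 0.0 to x would change the sign
   of a zero result.  x - 0.0 is safe except under rounding toward
   -infinity, where 0.0 - 0.0 is -0.0.] */
static double
add_zero_sketch (double x)
{
  return x + 0.0;	/* not equivalent to x when x == -0.0 */
}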
6152 /* Subroutine of fold() that checks comparisons of built-in math
6153 functions against real constants.
6155 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6156 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6157 is the type of the result and ARG0 and ARG1 are the operands of the
6158 comparison. ARG1 must be a TREE_REAL_CST.
6160 The function returns the constant folded tree if a simplification
6161 can be made, and NULL_TREE otherwise. */
6164 fold_mathfn_compare (location_t loc,
6165 enum built_in_function fcode, enum tree_code code,
6166 tree type, tree arg0, tree arg1)
6170 if (BUILTIN_SQRT_P (fcode))
6172 tree arg = CALL_EXPR_ARG (arg0, 0);
6173 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6175 c = TREE_REAL_CST (arg1);
6176 if (REAL_VALUE_NEGATIVE (c))
6178 /* sqrt(x) == y, < y, or <= y is always false, if y is negative. */
6179 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6180 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6182 /* sqrt(x) > y is always true, if y is negative and we
6183 don't care about NaNs, i.e. negative values of x. */
6184 if (code == NE_EXPR || !HONOR_NANS (mode))
6185 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6187 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6188 return fold_build2_loc (loc, GE_EXPR, type, arg,
6189 build_real (TREE_TYPE (arg), dconst0));
6191 else if (code == GT_EXPR || code == GE_EXPR)
6195 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6196 real_convert (&c2, mode, &c2);
6198 if (REAL_VALUE_ISINF (c2))
6200 /* sqrt(x) > y is x == +Inf, when y is very large. */
6201 if (HONOR_INFINITIES (mode))
6202 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6203 build_real (TREE_TYPE (arg), c2));
6205 /* sqrt(x) > y is always false, when y is very large
6206 and we don't care about infinities. */
6207 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6210 /* sqrt(x) > c is the same as x > c*c. */
6211 return fold_build2_loc (loc, code, type, arg,
6212 build_real (TREE_TYPE (arg), c2));
6214 else if (code == LT_EXPR || code == LE_EXPR)
6218 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6219 real_convert (&c2, mode, &c2);
6221 if (REAL_VALUE_ISINF (c2))
6223 /* sqrt(x) < y is always true, when y is a very large
6224 value and we don't care about NaNs or Infinities. */
6225 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6226 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6228 /* sqrt(x) < y is x != +Inf when y is very large and we
6229 don't care about NaNs. */
6230 if (! HONOR_NANS (mode))
6231 return fold_build2_loc (loc, NE_EXPR, type, arg,
6232 build_real (TREE_TYPE (arg), c2));
6234 /* sqrt(x) < y is x >= 0 when y is very large and we
6235 don't care about Infinities. */
6236 if (! HONOR_INFINITIES (mode))
6237 return fold_build2_loc (loc, GE_EXPR, type, arg,
6238 build_real (TREE_TYPE (arg), dconst0));
6240 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6241 if (lang_hooks.decls.global_bindings_p () != 0
6242 || CONTAINS_PLACEHOLDER_P (arg))
6245 arg = save_expr (arg);
6246 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6247 fold_build2_loc (loc, GE_EXPR, type, arg,
6248 build_real (TREE_TYPE (arg),
6250 fold_build2_loc (loc, NE_EXPR, type, arg,
6251 build_real (TREE_TYPE (arg),
6255 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6256 if (! HONOR_NANS (mode))
6257 return fold_build2_loc (loc, code, type, arg,
6258 build_real (TREE_TYPE (arg), c2));
6260 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6261 if (lang_hooks.decls.global_bindings_p () == 0
6262 && ! CONTAINS_PLACEHOLDER_P (arg))
6264 arg = save_expr (arg);
6265 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6266 fold_build2_loc (loc, GE_EXPR, type, arg,
6267 build_real (TREE_TYPE (arg),
6269 fold_build2_loc (loc, code, type, arg,
6270 build_real (TREE_TYPE (arg),
6279 /* Subroutine of fold() that optimizes comparisons against Infinities,
6280 either +Inf or -Inf.
6282 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6283 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6284 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6286 The function returns the constant folded tree if a simplification
6287 can be made, and NULL_TREE otherwise. */
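/* For example, with IEEE double and NaNs honored:
     x < +Inf    folds to  x <= DBL_MAX
     x != +Inf   folds to  !(x > DBL_MAX)
   For -Inf the sense of each comparison is swapped first.  */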
6290 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6291 tree arg0, tree arg1)
6293 enum machine_mode mode;
6294 REAL_VALUE_TYPE max;
6298 mode = TYPE_MODE (TREE_TYPE (arg0));
6300 /* For negative infinity swap the sense of the comparison. */
6301 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6303 code = swap_tree_comparison (code);
6308 /* x > +Inf is always false, if we ignore sNaNs.  */
6309 if (HONOR_SNANS (mode))
6311 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6314 /* x <= +Inf is always true, if we don't care about NaNs.  */
6315 if (! HONOR_NANS (mode))
6316 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6318 /* x <= +Inf is the same as x == x, i.e. !isnan(x).  */
6319 if (lang_hooks.decls.global_bindings_p () == 0
6320 && ! CONTAINS_PLACEHOLDER_P (arg0))
6322 arg0 = save_expr (arg0);
6323 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6329 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6330 real_maxval (&max, neg, mode);
6331 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6332 arg0, build_real (TREE_TYPE (arg0), max));
6335 /* x < +Inf is always equal to x <= DBL_MAX. */
6336 real_maxval (&max, neg, mode);
6337 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6338 arg0, build_real (TREE_TYPE (arg0), max));
6341 /* x != +Inf is always equal to !(x > DBL_MAX). */
6342 real_maxval (&max, neg, mode);
6343 if (! HONOR_NANS (mode))
6344 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6345 arg0, build_real (TREE_TYPE (arg0), max));
6347 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6348 arg0, build_real (TREE_TYPE (arg0), max));
6349 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6358 /* Subroutine of fold() that optimizes comparisons of a division by
6359 a nonzero integer constant against an integer constant, i.e. X/C1 op C2.
6362 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6363 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6364 are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6366 The function returns the constant folded tree if a simplification
6367 can be made, and NULL_TREE otherwise. */
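/* Worked example, using C's truncating division: x / 3 == 2 holds
   exactly for x in [6, 8], so the comparison is rewritten as the
   range check 6 <= x && x <= 8 via build_range_check below.  */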
6370 fold_div_compare (location_t loc,
6371 enum tree_code code, tree type, tree arg0, tree arg1)
6373 tree prod, tmp, hi, lo;
6374 tree arg00 = TREE_OPERAND (arg0, 0);
6375 tree arg01 = TREE_OPERAND (arg0, 1);
6376 unsigned HOST_WIDE_INT lpart;
6377 HOST_WIDE_INT hpart;
6378 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6382 /* We have to do this the hard way to detect unsigned overflow.
6383 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6384 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6385 TREE_INT_CST_HIGH (arg01),
6386 TREE_INT_CST_LOW (arg1),
6387 TREE_INT_CST_HIGH (arg1),
6388 &lpart, &hpart, unsigned_p);
6389 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6391 neg_overflow = false;
6395 tmp = int_const_binop (MINUS_EXPR, arg01,
6396 build_int_cst (TREE_TYPE (arg01), 1), 0);
6399 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6400 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6401 TREE_INT_CST_HIGH (prod),
6402 TREE_INT_CST_LOW (tmp),
6403 TREE_INT_CST_HIGH (tmp),
6404 &lpart, &hpart, unsigned_p);
6405 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6406 -1, overflow | TREE_OVERFLOW (prod));
6408 else if (tree_int_cst_sgn (arg01) >= 0)
6410 tmp = int_const_binop (MINUS_EXPR, arg01,
6411 build_int_cst (TREE_TYPE (arg01), 1), 0);
6412 switch (tree_int_cst_sgn (arg1))
6415 neg_overflow = true;
6416 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6421 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6426 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6436 /* A negative divisor reverses the relational operators. */
6437 code = swap_tree_comparison (code);
6439 tmp = int_const_binop (PLUS_EXPR, arg01,
6440 build_int_cst (TREE_TYPE (arg01), 1), 0);
6441 switch (tree_int_cst_sgn (arg1))
6444 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6449 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6454 neg_overflow = true;
6455 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6467 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6468 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6469 if (TREE_OVERFLOW (hi))
6470 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6471 if (TREE_OVERFLOW (lo))
6472 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6473 return build_range_check (loc, type, arg00, 1, lo, hi);
6476 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6477 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6478 if (TREE_OVERFLOW (hi))
6479 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6480 if (TREE_OVERFLOW (lo))
6481 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6482 return build_range_check (loc, type, arg00, 0, lo, hi);
6485 if (TREE_OVERFLOW (lo))
6487 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6488 return omit_one_operand_loc (loc, type, tmp, arg00);
6490 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6493 if (TREE_OVERFLOW (hi))
6495 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6496 return omit_one_operand_loc (loc, type, tmp, arg00);
6498 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6501 if (TREE_OVERFLOW (hi))
6503 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6504 return omit_one_operand_loc (loc, type, tmp, arg00);
6506 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6509 if (TREE_OVERFLOW (lo))
6511 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6512 return omit_one_operand_loc (loc, type, tmp, arg00);
6514 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6524 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6525 equality/inequality test, then return a simplified form of the test
6526 using a sign test.  Otherwise return NULL.  TYPE is the desired result type.  */
6530 fold_single_bit_test_into_sign_test (location_t loc,
6531 enum tree_code code, tree arg0, tree arg1,
6534 /* If this is testing a single bit, we can optimize the test. */
6535 if ((code == NE_EXPR || code == EQ_EXPR)
6536 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6537 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6539 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6540 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6541 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6543 if (arg00 != NULL_TREE
6544 /* This is only a win if casting to a signed type is cheap,
6545 i.e. when arg00's type is not a partial mode. */
6546 && TYPE_PRECISION (TREE_TYPE (arg00))
6547 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6549 tree stype = signed_type_for (TREE_TYPE (arg00));
6550 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6552 fold_convert_loc (loc, stype, arg00),
6553 build_int_cst (stype, 0));
6560 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6561 equality/inequality test, then return a simplified form of
6562 the test using shifts and logical operations. Otherwise return
6563 NULL. TYPE is the desired result type. */
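/* For instance, with C = 8 and hence C2 = 3:
     (A & 8) != 0   becomes  (A >> 3) & 1
     (A & 8) == 0   becomes  ((A >> 3) ^ 1) & 1
   after the cheaper sign test above has been tried first.  */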
6566 fold_single_bit_test (location_t loc, enum tree_code code,
6567 tree arg0, tree arg1, tree result_type)
6569 /* If this is testing a single bit, we can optimize the test. */
6570 if ((code == NE_EXPR || code == EQ_EXPR)
6571 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6572 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6574 tree inner = TREE_OPERAND (arg0, 0);
6575 tree type = TREE_TYPE (arg0);
6576 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6577 enum machine_mode operand_mode = TYPE_MODE (type);
6579 tree signed_type, unsigned_type, intermediate_type;
6582 /* First, see if we can fold the single bit test into a sign-bit
6584 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6589 /* Otherwise we have (A & C) != 0 where C is a single bit,
6590 convert that into ((A >> C2) & 1), where C2 = log2(C).
6591 Similarly for (A & C) == 0. */
6593 /* If INNER is a right shift of a constant and it plus BITNUM does
6594 not overflow, adjust BITNUM and INNER. */
6595 if (TREE_CODE (inner) == RSHIFT_EXPR
6596 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6597 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6598 && bitnum < TYPE_PRECISION (type)
6599 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6600 bitnum - TYPE_PRECISION (type)))
6602 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6603 inner = TREE_OPERAND (inner, 0);
6606 /* If we are going to be able to omit the AND below, we must do our
6607 operations as unsigned. If we must use the AND, we have a choice.
6608 Normally unsigned is faster, but for some machines signed is. */
6609 #ifdef LOAD_EXTEND_OP
6610 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6611 && !flag_syntax_only) ? 0 : 1;
6616 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6617 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6618 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6619 inner = fold_convert_loc (loc, intermediate_type, inner);
6622 inner = build2 (RSHIFT_EXPR, intermediate_type,
6623 inner, size_int (bitnum));
6625 one = build_int_cst (intermediate_type, 1);
6627 if (code == EQ_EXPR)
6628 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6630 /* Put the AND last so it can combine with more things. */
6631 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6633 /* Make sure to return the proper type. */
6634 inner = fold_convert_loc (loc, result_type, inner);
6641 /* Check whether we are allowed to reorder operands arg0 and arg1,
6642 such that the evaluation of arg1 occurs before arg0. */
6645 reorder_operands_p (const_tree arg0, const_tree arg1)
6647 if (! flag_evaluation_order)
6649 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6651 return ! TREE_SIDE_EFFECTS (arg0)
6652 && ! TREE_SIDE_EFFECTS (arg1);
6655 /* Test whether it is preferable to swap two operands, ARG0 and
6656 ARG1, for example because ARG0 is an integer constant and ARG1
6657 isn't. If REORDER is true, only recommend swapping if we can
6658 evaluate the operands in reverse order. */
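/* For example, callers use this predicate to canonicalize 5 < x
   into x > 5, so that constants end up as the second operand.  */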
6661 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6663 STRIP_SIGN_NOPS (arg0);
6664 STRIP_SIGN_NOPS (arg1);
6666 if (TREE_CODE (arg1) == INTEGER_CST)
6668 if (TREE_CODE (arg0) == INTEGER_CST)
6671 if (TREE_CODE (arg1) == REAL_CST)
6673 if (TREE_CODE (arg0) == REAL_CST)
6676 if (TREE_CODE (arg1) == FIXED_CST)
6678 if (TREE_CODE (arg0) == FIXED_CST)
6681 if (TREE_CODE (arg1) == COMPLEX_CST)
6683 if (TREE_CODE (arg0) == COMPLEX_CST)
6686 if (TREE_CONSTANT (arg1))
6688 if (TREE_CONSTANT (arg0))
6691 if (optimize_function_for_size_p (cfun))
6694 if (reorder && flag_evaluation_order
6695 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6698 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6699 for commutative and comparison operators. Ensuring a canonical
6700 form allows the optimizers to find additional redundancies without
6701 having to explicitly check for both orderings. */
6702 if (TREE_CODE (arg0) == SSA_NAME
6703 && TREE_CODE (arg1) == SSA_NAME
6704 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6707 /* Put SSA_NAMEs last. */
6708 if (TREE_CODE (arg1) == SSA_NAME)
6710 if (TREE_CODE (arg0) == SSA_NAME)
6713 /* Put variables last. */
6722 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6723 ARG0 is extended to a wider type. */
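/* E.g. if C has type unsigned char, (int) C == 300 can never hold
   because 300 does not fit in unsigned char, so it folds to constant
   false, while (int) C == 7 is narrowed to the comparison C == 7.  */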
6726 fold_widened_comparison (location_t loc, enum tree_code code,
6727 tree type, tree arg0, tree arg1)
6729 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6731 tree shorter_type, outer_type;
6735 if (arg0_unw == arg0)
6737 shorter_type = TREE_TYPE (arg0_unw);
6739 #ifdef HAVE_canonicalize_funcptr_for_compare
6740 /* Disable this optimization if we're casting a function pointer
6741 type on targets that require function pointer canonicalization. */
6742 if (HAVE_canonicalize_funcptr_for_compare
6743 && TREE_CODE (shorter_type) == POINTER_TYPE
6744 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6748 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6751 arg1_unw = get_unwidened (arg1, NULL_TREE);
6753 /* If possible, express the comparison in the shorter mode. */
6754 if ((code == EQ_EXPR || code == NE_EXPR
6755 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6756 && (TREE_TYPE (arg1_unw) == shorter_type
6757 || ((TYPE_PRECISION (shorter_type)
6758 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6759 && (TYPE_UNSIGNED (shorter_type)
6760 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6761 || (TREE_CODE (arg1_unw) == INTEGER_CST
6762 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6763 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6764 && int_fits_type_p (arg1_unw, shorter_type))))
6765 return fold_build2_loc (loc, code, type, arg0_unw,
6766 fold_convert_loc (loc, shorter_type, arg1_unw));
6768 if (TREE_CODE (arg1_unw) != INTEGER_CST
6769 || TREE_CODE (shorter_type) != INTEGER_TYPE
6770 || !int_fits_type_p (arg1_unw, shorter_type))
6773 /* If we are comparing with an integer that does not fit into the range
6774 of the shorter type, the result is known. */
6775 outer_type = TREE_TYPE (arg1_unw);
6776 min = lower_bound_in_type (outer_type, shorter_type);
6777 max = upper_bound_in_type (outer_type, shorter_type);
6779 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6781 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6788 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6793 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6799 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6801 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6806 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6808 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6817 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where the
6818 conversion applied to ARG0 changes only its signedness.  */
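/* E.g. if U has type unsigned int, (int) U == 5 becomes U == 5U,
   since a cast that changes only the signedness cannot affect
   equality.  */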
6821 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6822 tree arg0, tree arg1)
6825 tree inner_type, outer_type;
6827 if (!CONVERT_EXPR_P (arg0))
6830 outer_type = TREE_TYPE (arg0);
6831 arg0_inner = TREE_OPERAND (arg0, 0);
6832 inner_type = TREE_TYPE (arg0_inner);
6834 #ifdef HAVE_canonicalize_funcptr_for_compare
6835 /* Disable this optimization if we're casting a function pointer
6836 type on targets that require function pointer canonicalization. */
6837 if (HAVE_canonicalize_funcptr_for_compare
6838 && TREE_CODE (inner_type) == POINTER_TYPE
6839 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6843 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6846 if (TREE_CODE (arg1) != INTEGER_CST
6847 && !(CONVERT_EXPR_P (arg1)
6848 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6851 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6852 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6857 if (TREE_CODE (arg1) == INTEGER_CST)
6858 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6859 TREE_INT_CST_HIGH (arg1), 0,
6860 TREE_OVERFLOW (arg1));
6862 arg1 = fold_convert_loc (loc, inner_type, arg1);
6864 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6867 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6868 the step of the array.  Reconstructs s and delta in the case of s *
6869 delta being an integer constant (and thus already folded).  ADDR is
6870 the address.  OP1 is the multiplicative expression.  If the
6871 function succeeds, the new address expression is returned.
6872 Otherwise NULL_TREE is returned. LOC is the location of the
6873 resulting expression. */
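/* For example, if the elements of a are 4 bytes wide,
     &a[i] p+ 4 * d   becomes   &a[i + d],
   subject to the overflow checks performed below.  */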
6876 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6878 tree s, delta, step;
6879 tree ref = TREE_OPERAND (addr, 0), pref;
6884 /* Strip the nops that might be added when converting op1 to sizetype. */
6887 /* Canonicalize op1 into a possibly non-constant delta
6888 and an INTEGER_CST s. */
6889 if (TREE_CODE (op1) == MULT_EXPR)
6891 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6896 if (TREE_CODE (arg0) == INTEGER_CST)
6901 else if (TREE_CODE (arg1) == INTEGER_CST)
6909 else if (TREE_CODE (op1) == INTEGER_CST)
6916 /* Act as if we had delta * 1.  */
6918 s = integer_one_node;
6921 for (;; ref = TREE_OPERAND (ref, 0))
6923 if (TREE_CODE (ref) == ARRAY_REF)
6927 /* Remember if this was a multi-dimensional array. */
6928 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6931 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6934 itype = TREE_TYPE (domain);
6936 step = array_ref_element_size (ref);
6937 if (TREE_CODE (step) != INTEGER_CST)
6942 if (! tree_int_cst_equal (step, s))
6947 /* See whether delta is a multiple of step.  */
6948 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6954 /* Only fold here if we can verify we do not overflow one
6955 dimension of a multi-dimensional array. */
6960 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6961 || !TYPE_MAX_VALUE (domain)
6962 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6965 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6966 fold_convert_loc (loc, itype,
6967 TREE_OPERAND (ref, 1)),
6968 fold_convert_loc (loc, itype, delta));
6970 || TREE_CODE (tmp) != INTEGER_CST
6971 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6980 if (!handled_component_p (ref))
6984 /* We found a suitable array reference.  Copy everything up to it,
6985 and replace the index. */
6987 pref = TREE_OPERAND (addr, 0);
6988 ret = copy_node (pref);
6989 SET_EXPR_LOCATION (ret, loc);
6994 pref = TREE_OPERAND (pref, 0);
6995 TREE_OPERAND (pos, 0) = copy_node (pref);
6996 pos = TREE_OPERAND (pos, 0);
6999 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
7000 fold_convert_loc (loc, itype,
7001 TREE_OPERAND (pos, 1)),
7002 fold_convert_loc (loc, itype, delta));
7004 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7008 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7009 means A >= Y && A != MAX, but in this case we know that
7010 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7013 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7015 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7017 if (TREE_CODE (bound) == LT_EXPR)
7018 a = TREE_OPERAND (bound, 0);
7019 else if (TREE_CODE (bound) == GT_EXPR)
7020 a = TREE_OPERAND (bound, 1);
7024 typea = TREE_TYPE (a);
7025 if (!INTEGRAL_TYPE_P (typea)
7026 && !POINTER_TYPE_P (typea))
7029 if (TREE_CODE (ineq) == LT_EXPR)
7031 a1 = TREE_OPERAND (ineq, 1);
7032 y = TREE_OPERAND (ineq, 0);
7034 else if (TREE_CODE (ineq) == GT_EXPR)
7036 a1 = TREE_OPERAND (ineq, 0);
7037 y = TREE_OPERAND (ineq, 1);
7042 if (TREE_TYPE (a1) != typea)
7045 if (POINTER_TYPE_P (typea))
7047 /* Convert the pointer types to integers before taking the difference.  */
7048 tree ta = fold_convert_loc (loc, ssizetype, a);
7049 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7050 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7053 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7055 if (!diff || !integer_onep (diff))
7058 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7061 /* Fold a sum or difference involving at least one multiplication.
7062 Returns the folded tree or NULL if no simplification could be made. */
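/* Besides the plain (A * C) +- (B * C) -> (A +- B) * C cases listed
   below, a common power-of-two factor is also extracted: e.g.
   x*12 + y*4, which has no common multiplicand, still folds to
   (x*3 + y) * 4.  */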
7065 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7066 tree arg0, tree arg1)
7068 tree arg00, arg01, arg10, arg11;
7069 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7071 /* (A * C) +- (B * C) -> (A+-B) * C.
7072 (A * C) +- A -> A * (C+-1).
7073 We are most concerned about the case where C is a constant,
7074 but other combinations show up during loop reduction. Since
7075 it is not difficult, try all four possibilities. */
7077 if (TREE_CODE (arg0) == MULT_EXPR)
7079 arg00 = TREE_OPERAND (arg0, 0);
7080 arg01 = TREE_OPERAND (arg0, 1);
7082 else if (TREE_CODE (arg0) == INTEGER_CST)
7084 arg00 = build_one_cst (type);
7089 /* We cannot generate constant 1 for fract. */
7090 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7093 arg01 = build_one_cst (type);
7095 if (TREE_CODE (arg1) == MULT_EXPR)
7097 arg10 = TREE_OPERAND (arg1, 0);
7098 arg11 = TREE_OPERAND (arg1, 1);
7100 else if (TREE_CODE (arg1) == INTEGER_CST)
7102 arg10 = build_one_cst (type);
7103 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7104 the purpose of this canonicalization. */
7105 if (TREE_INT_CST_HIGH (arg1) == -1
7106 && negate_expr_p (arg1)
7107 && code == PLUS_EXPR)
7109 arg11 = negate_expr (arg1);
7117 /* We cannot generate constant 1 for fract. */
7118 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7121 arg11 = build_one_cst (type);
7125 if (operand_equal_p (arg01, arg11, 0))
7126 same = arg01, alt0 = arg00, alt1 = arg10;
7127 else if (operand_equal_p (arg00, arg10, 0))
7128 same = arg00, alt0 = arg01, alt1 = arg11;
7129 else if (operand_equal_p (arg00, arg11, 0))
7130 same = arg00, alt0 = arg01, alt1 = arg10;
7131 else if (operand_equal_p (arg01, arg10, 0))
7132 same = arg01, alt0 = arg00, alt1 = arg11;
7134 /* No identical multiplicands; see if we can find a common
7135 power-of-two factor in non-power-of-two multiplies. This
7136 can help in multi-dimensional array access. */
7137 else if (host_integerp (arg01, 0)
7138 && host_integerp (arg11, 0))
7140 HOST_WIDE_INT int01, int11, tmp;
7143 int01 = TREE_INT_CST_LOW (arg01);
7144 int11 = TREE_INT_CST_LOW (arg11);
7146 /* Move min of absolute values to int11. */
7147 if ((int01 >= 0 ? int01 : -int01)
7148 < (int11 >= 0 ? int11 : -int11))
7150 tmp = int01, int01 = int11, int11 = tmp;
7151 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7158 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7159 /* The remainder should not be a constant, otherwise we
7160 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7161 increase the number of multiplications necessary.  */
7162 && TREE_CODE (arg10) != INTEGER_CST)
7164 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7165 build_int_cst (TREE_TYPE (arg00),
7170 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7175 return fold_build2_loc (loc, MULT_EXPR, type,
7176 fold_build2_loc (loc, code, type,
7177 fold_convert_loc (loc, type, alt0),
7178 fold_convert_loc (loc, type, alt1)),
7179 fold_convert_loc (loc, type, same));
7184 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7185 specified by EXPR into the buffer PTR of length LEN bytes.
7186 Return the number of bytes placed in the buffer, or zero upon failure.  */
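/* For example, on a 32-bit little-endian target the INTEGER_CST
   0x01020304 is emitted as the bytes 04 03 02 01; the loop below
   consults BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN to get other
   layouts right.  */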
7190 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7192 tree type = TREE_TYPE (expr);
7193 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7194 int byte, offset, word, words;
7195 unsigned char value;
7197 if (total_bytes > len)
7199 words = total_bytes / UNITS_PER_WORD;
7201 for (byte = 0; byte < total_bytes; byte++)
7203 int bitpos = byte * BITS_PER_UNIT;
7204 if (bitpos < HOST_BITS_PER_WIDE_INT)
7205 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7207 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7208 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7210 if (total_bytes > UNITS_PER_WORD)
7212 word = byte / UNITS_PER_WORD;
7213 if (WORDS_BIG_ENDIAN)
7214 word = (words - 1) - word;
7215 offset = word * UNITS_PER_WORD;
7216 if (BYTES_BIG_ENDIAN)
7217 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7219 offset += byte % UNITS_PER_WORD;
7222 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7223 ptr[offset] = value;
7229 /* Subroutine of native_encode_expr. Encode the REAL_CST
7230 specified by EXPR into the buffer PTR of length LEN bytes.
7231 Return the number of bytes placed in the buffer, or zero upon failure.  */
7235 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7237 tree type = TREE_TYPE (expr);
7238 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7239 int byte, offset, word, words, bitpos;
7240 unsigned char value;
7242 /* There are always 32 bits in each long, no matter the size of
7243 the host's long.  We handle floating point representations with up to 192 bits.  */
7247 if (total_bytes > len)
7249 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7251 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7253 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7254 bitpos += BITS_PER_UNIT)
7256 byte = (bitpos / BITS_PER_UNIT) & 3;
7257 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7259 if (UNITS_PER_WORD < 4)
7261 word = byte / UNITS_PER_WORD;
7262 if (WORDS_BIG_ENDIAN)
7263 word = (words - 1) - word;
7264 offset = word * UNITS_PER_WORD;
7265 if (BYTES_BIG_ENDIAN)
7266 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7268 offset += byte % UNITS_PER_WORD;
7271 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7272 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7277 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7278 specified by EXPR into the buffer PTR of length LEN bytes.
7279 Return the number of bytes placed in the buffer, or zero upon failure.  */
7283 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7288 part = TREE_REALPART (expr);
7289 rsize = native_encode_expr (part, ptr, len);
7292 part = TREE_IMAGPART (expr);
7293 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7296 return rsize + isize;
7300 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7301 specified by EXPR into the buffer PTR of length LEN bytes.
7302 Return the number of bytes placed in the buffer, or zero upon failure.  */
7306 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7308 int i, size, offset, count;
7309 tree itype, elem, elements;
7312 elements = TREE_VECTOR_CST_ELTS (expr);
7313 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7314 itype = TREE_TYPE (TREE_TYPE (expr));
7315 size = GET_MODE_SIZE (TYPE_MODE (itype));
7316 for (i = 0; i < count; i++)
7320 elem = TREE_VALUE (elements);
7321 elements = TREE_CHAIN (elements);
7328 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7333 if (offset + size > len)
7335 memset (ptr+offset, 0, size);
7343 /* Subroutine of native_encode_expr. Encode the STRING_CST
7344 specified by EXPR into the buffer PTR of length LEN bytes.
7345 Return the number of bytes placed in the buffer, or zero upon failure.  */
7349 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7351 tree type = TREE_TYPE (expr);
7352 HOST_WIDE_INT total_bytes;
7354 if (TREE_CODE (type) != ARRAY_TYPE
7355 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7356 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7357 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7359 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7360 if (total_bytes > len)
7362 if (TREE_STRING_LENGTH (expr) < total_bytes)
7364 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7365 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7366 total_bytes - TREE_STRING_LENGTH (expr));
7369 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7374 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7375 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7376 buffer PTR of length LEN bytes. Return the number of bytes
7377 placed in the buffer, or zero upon failure. */
7380 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7382 switch (TREE_CODE (expr))
7385 return native_encode_int (expr, ptr, len);
7388 return native_encode_real (expr, ptr, len);
7391 return native_encode_complex (expr, ptr, len);
7394 return native_encode_vector (expr, ptr, len);
7397 return native_encode_string (expr, ptr, len);
7405 /* Subroutine of native_interpret_expr. Interpret the contents of
7406 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7407 If the buffer cannot be interpreted, return NULL_TREE. */
7410 native_interpret_int (tree type, const unsigned char *ptr, int len)
7412 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7413 int byte, offset, word, words;
7414 unsigned char value;
7417 if (total_bytes > len)
7419 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7422 result = double_int_zero;
7423 words = total_bytes / UNITS_PER_WORD;
7425 for (byte = 0; byte < total_bytes; byte++)
7427 int bitpos = byte * BITS_PER_UNIT;
7428 if (total_bytes > UNITS_PER_WORD)
7430 word = byte / UNITS_PER_WORD;
7431 if (WORDS_BIG_ENDIAN)
7432 word = (words - 1) - word;
7433 offset = word * UNITS_PER_WORD;
7434 if (BYTES_BIG_ENDIAN)
7435 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7437 offset += byte % UNITS_PER_WORD;
7440 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7441 value = ptr[offset];
7443 if (bitpos < HOST_BITS_PER_WIDE_INT)
7444 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7446 result.high |= (unsigned HOST_WIDE_INT) value
7447 << (bitpos - HOST_BITS_PER_WIDE_INT);
7450 return double_int_to_tree (type, result);
7454 /* Subroutine of native_interpret_expr. Interpret the contents of
7455 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7456 If the buffer cannot be interpreted, return NULL_TREE. */
7459 native_interpret_real (tree type, const unsigned char *ptr, int len)
7461 enum machine_mode mode = TYPE_MODE (type);
7462 int total_bytes = GET_MODE_SIZE (mode);
7463 int byte, offset, word, words, bitpos;
7464 unsigned char value;
7465 /* There are always 32 bits in each long, no matter the size of
7466 the host's long.  We handle floating point representations with up to 192 bits.  */
7471 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7472 if (total_bytes > len || total_bytes > 24)
7474 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7476 memset (tmp, 0, sizeof (tmp));
7477 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7478 bitpos += BITS_PER_UNIT)
7480 byte = (bitpos / BITS_PER_UNIT) & 3;
7481 if (UNITS_PER_WORD < 4)
7483 word = byte / UNITS_PER_WORD;
7484 if (WORDS_BIG_ENDIAN)
7485 word = (words - 1) - word;
7486 offset = word * UNITS_PER_WORD;
7487 if (BYTES_BIG_ENDIAN)
7488 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7490 offset += byte % UNITS_PER_WORD;
7493 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7494 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7496 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7499 real_from_target (&r, tmp, mode);
7500 return build_real (type, r);
7504 /* Subroutine of native_interpret_expr. Interpret the contents of
7505 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7506 If the buffer cannot be interpreted, return NULL_TREE. */
7509 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7511 tree etype, rpart, ipart;
7514 etype = TREE_TYPE (type);
7515 size = GET_MODE_SIZE (TYPE_MODE (etype));
7518 rpart = native_interpret_expr (etype, ptr, size);
7521 ipart = native_interpret_expr (etype, ptr+size, size);
7524 return build_complex (type, rpart, ipart);
7528 /* Subroutine of native_interpret_expr. Interpret the contents of
7529 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7530 If the buffer cannot be interpreted, return NULL_TREE. */
7533 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7535 tree etype, elem, elements;
7538 etype = TREE_TYPE (type);
7539 size = GET_MODE_SIZE (TYPE_MODE (etype));
7540 count = TYPE_VECTOR_SUBPARTS (type);
7541 if (size * count > len)
7544 elements = NULL_TREE;
7545 for (i = count - 1; i >= 0; i--)
7547 elem = native_interpret_expr (etype, ptr+(i*size), size);
7550 elements = tree_cons (NULL_TREE, elem, elements);
7552 return build_vector (type, elements);
7556 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7557 the buffer PTR of length LEN as a constant of type TYPE. For
7558 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7559 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7560 return NULL_TREE. */
7563 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7565 switch (TREE_CODE (type))
7570 return native_interpret_int (type, ptr, len);
7573 return native_interpret_real (type, ptr, len);
7576 return native_interpret_complex (type, ptr, len);
7579 return native_interpret_vector (type, ptr, len);
7587 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7588 TYPE at compile-time. If we're unable to perform the conversion
7589 return NULL_TREE. */
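/* E.g. VIEW_CONVERT_EXPR<float>(1065353216) folds to 1.0f on targets
   whose float is IEEE single precision (1065353216 == 0x3f800000):
   the constant is serialized by native_encode_expr and reread as the
   new type by native_interpret_expr.  */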
7592 fold_view_convert_expr (tree type, tree expr)
7594 /* We support up to 512-bit values (for V8DFmode). */
7595 unsigned char buffer[64];
7598 /* Check that the host and target are sane. */
7599 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7602 len = native_encode_expr (expr, buffer, sizeof (buffer));
7606 return native_interpret_expr (type, buffer, len);
7609 /* Build an expression for the address of T. Folds away INDIRECT_REF
7610 to avoid confusing the gimplify process. */
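/* E.g. building the address of *p yields p itself, converted to
   PTRTYPE if necessary.  */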
7613 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7615 /* The size of the object is not relevant when talking about its address. */
7616 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7617 t = TREE_OPERAND (t, 0);
7619 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7620 if (TREE_CODE (t) == INDIRECT_REF
7621 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7623 t = TREE_OPERAND (t, 0);
7625 if (TREE_TYPE (t) != ptrtype)
7627 t = build1 (NOP_EXPR, ptrtype, t);
7628 SET_EXPR_LOCATION (t, loc);
7631 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7633 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7635 if (TREE_TYPE (t) != ptrtype)
7636 t = fold_convert_loc (loc, ptrtype, t);
7640 t = build1 (ADDR_EXPR, ptrtype, t);
7641 SET_EXPR_LOCATION (t, loc);
7647 /* Build an expression for the address of T. */
7650 build_fold_addr_expr_loc (location_t loc, tree t)
7652 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7654 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7657 /* Fold a unary expression of code CODE and type TYPE with operand
7658 OP0. Return the folded expression if folding is successful.
7659 Otherwise, return NULL_TREE. */
7662 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7666 enum tree_code_class kind = TREE_CODE_CLASS (code);
7668 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7669 && TREE_CODE_LENGTH (code) == 1);
7674 if (CONVERT_EXPR_CODE_P (code)
7675 || code == FLOAT_EXPR || code == ABS_EXPR)
7677 /* Don't use STRIP_NOPS, because signedness of argument type matters.  */
7679 STRIP_SIGN_NOPS (arg0);
7683 /* Strip any conversions that don't change the mode. This
7684 is safe for every expression, except for a comparison
7685 expression because its signedness is derived from its operands.
7688 Note that this is done as an internal manipulation within
7689 the constant folder, in order to find the simplest
7690 representation of the arguments so that their form can be
7691 studied.  In any case, the appropriate type conversions
7692 should be put back in the tree that will get out of the folder.  */
7698 if (TREE_CODE_CLASS (code) == tcc_unary)
7700 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7701 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7702 fold_build1_loc (loc, code, type,
7703 fold_convert_loc (loc, TREE_TYPE (op0),
7704 TREE_OPERAND (arg0, 1))));
7705 else if (TREE_CODE (arg0) == COND_EXPR)
7707 tree arg01 = TREE_OPERAND (arg0, 1);
7708 tree arg02 = TREE_OPERAND (arg0, 2);
7709 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7710 arg01 = fold_build1_loc (loc, code, type,
7711 fold_convert_loc (loc,
7712 TREE_TYPE (op0), arg01));
7713 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7714 arg02 = fold_build1_loc (loc, code, type,
7715 fold_convert_loc (loc,
7716 TREE_TYPE (op0), arg02));
7717 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7720 /* If this was a conversion, and all we did was to move it
7721 inside the COND_EXPR, bring it back out.  But leave it if
7722 it is a conversion from integer to integer and the
7723 result precision is no wider than a word since such a
7724 conversion is cheap and may be optimized away by combine,
7725 while it couldn't if it were outside the COND_EXPR. Then return
7726 so we don't get into an infinite recursion loop taking the
7727 conversion out and then back in. */
7729 if ((CONVERT_EXPR_CODE_P (code)
7730 || code == NON_LVALUE_EXPR)
7731 && TREE_CODE (tem) == COND_EXPR
7732 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7733 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7734 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7735 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7736 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7737 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7738 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7740 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7741 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7742 || flag_syntax_only))
7744 tem = build1 (code, type,
7746 TREE_TYPE (TREE_OPERAND
7747 (TREE_OPERAND (tem, 1), 0)),
7748 TREE_OPERAND (tem, 0),
7749 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7750 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7751 SET_EXPR_LOCATION (tem, loc);
7755 else if (COMPARISON_CLASS_P (arg0))
7757 if (TREE_CODE (type) == BOOLEAN_TYPE)
7759 arg0 = copy_node (arg0);
7760 TREE_TYPE (arg0) = type;
7763 else if (TREE_CODE (type) != INTEGER_TYPE)
7764 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7765 fold_build1_loc (loc, code, type,
7767 fold_build1_loc (loc, code, type,
7768 integer_zero_node));
7775 /* Re-association barriers around constants and other re-association
7776 barriers can be removed. */
7777 if (CONSTANT_CLASS_P (op0)
7778 || TREE_CODE (op0) == PAREN_EXPR)
7779 return fold_convert_loc (loc, type, op0);
7784 case FIX_TRUNC_EXPR:
7785 if (TREE_TYPE (op0) == type)
7788 /* If we have (type) (a CMP b) and type is an integral type, return
7789 a new expression involving the new type.  */
7790 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7791 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7792 TREE_OPERAND (op0, 1));
7794 /* Handle cases of two conversions in a row. */
7795 if (CONVERT_EXPR_P (op0))
7797 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7798 tree inter_type = TREE_TYPE (op0);
7799 int inside_int = INTEGRAL_TYPE_P (inside_type);
7800 int inside_ptr = POINTER_TYPE_P (inside_type);
7801 int inside_float = FLOAT_TYPE_P (inside_type);
7802 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7803 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7804 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7805 int inter_int = INTEGRAL_TYPE_P (inter_type);
7806 int inter_ptr = POINTER_TYPE_P (inter_type);
7807 int inter_float = FLOAT_TYPE_P (inter_type);
7808 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7809 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7810 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7811 int final_int = INTEGRAL_TYPE_P (type);
7812 int final_ptr = POINTER_TYPE_P (type);
7813 int final_float = FLOAT_TYPE_P (type);
7814 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7815 unsigned int final_prec = TYPE_PRECISION (type);
7816 int final_unsignedp = TYPE_UNSIGNED (type);
7818 /* In addition to the cases of two conversions in a row
7819 handled below, if we are converting something to its own
7820 type via an object of identical or wider precision, neither
7821 conversion is needed. */
7822 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7823 && (((inter_int || inter_ptr) && final_int)
7824 || (inter_float && final_float))
7825 && inter_prec >= final_prec)
7826 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7828 /* Likewise, if the intermediate and initial types are either both
7829 float or both integer, we don't need the middle conversion if the
7830 former is wider than the latter and doesn't change the signedness
7831 (for integers). Avoid this if the final type is a pointer since
7832 then we sometimes need the middle conversion. Likewise if the
7833 final type has a precision not equal to the size of its mode. */
7834 if (((inter_int && inside_int)
7835 || (inter_float && inside_float)
7836 || (inter_vec && inside_vec))
7837 && inter_prec >= inside_prec
7838 && (inter_float || inter_vec
7839 || inter_unsignedp == inside_unsignedp)
7840 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7841 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7843 && (! final_vec || inter_prec == inside_prec))
7844 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7846 /* If we have a sign-extension of a zero-extended value, we can
7847 replace that by a single zero-extension. */
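/* E.g. for an unsigned char C, (int)(short) C is equivalent to the
   single zero-extension (int) C.  */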
7848 if (inside_int && inter_int && final_int
7849 && inside_prec < inter_prec && inter_prec < final_prec
7850 && inside_unsignedp && !inter_unsignedp)
7851 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7853 /* Two conversions in a row are not needed unless:
7854 - some conversion is floating-point (overstrict for now), or
7855 - some conversion is a vector (overstrict for now), or
7856 - the intermediate type is narrower than both initial and final types, or
7858 - the intermediate type and innermost type differ in signedness,
7859 and the outermost type is wider than the intermediate, or
7860 - the initial type is a pointer type and the precisions of the
7861 intermediate and final types differ, or
7862 - the final type is a pointer type and the precisions of the
7863 initial and intermediate types differ. */
7864 if (! inside_float && ! inter_float && ! final_float
7865 && ! inside_vec && ! inter_vec && ! final_vec
7866 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7867 && ! (inside_int && inter_int
7868 && inter_unsignedp != inside_unsignedp
7869 && inter_prec < final_prec)
7870 && ((inter_unsignedp && inter_prec > inside_prec)
7871 == (final_unsignedp && final_prec > inter_prec))
7872 && ! (inside_ptr && inter_prec != final_prec)
7873 && ! (final_ptr && inside_prec != inter_prec)
7874 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7875 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7876 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7879 /* Handle (T *)&A.B.C for A being of type T and B and C
7880 living at offset zero. This occurs frequently in
7881 C++ upcasting and then accessing the base. */
7882 if (TREE_CODE (op0) == ADDR_EXPR
7883 && POINTER_TYPE_P (type)
7884 && handled_component_p (TREE_OPERAND (op0, 0)))
7886 HOST_WIDE_INT bitsize, bitpos;
7888 enum machine_mode mode;
7889 int unsignedp, volatilep;
7890 tree base = TREE_OPERAND (op0, 0);
7891 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7892 &mode, &unsignedp, &volatilep, false);
7893 /* If the reference was to a (constant) zero offset, we can use
7894 the address of the base if it has the same base type
7895 as the result type and the pointer type is unqualified. */
7896 if (! offset && bitpos == 0
7897 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7898 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7899 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7900 return fold_convert_loc (loc, type,
7901 build_fold_addr_expr_loc (loc, base));
7904 if (TREE_CODE (op0) == MODIFY_EXPR
7905 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7906 /* Detect assigning a bitfield. */
7907 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7909 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7911 /* Don't leave an assignment inside a conversion
7912 unless assigning a bitfield. */
7913 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7914 /* First do the assignment, then return converted constant. */
7915 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7916 TREE_NO_WARNING (tem) = 1;
7917 TREE_USED (tem) = 1;
7918 SET_EXPR_LOCATION (tem, loc);
7922 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7923 constant (if x has signed type, the sign bit cannot be set
7924 in c). This folds extension into the BIT_AND_EXPR.
7925 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7926 very likely don't have maximal range for their precision and this
7927 transformation effectively doesn't preserve non-maximal ranges. */
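/* E.g. with a 32-bit int x, (unsigned long)(x & 0xff) becomes
   (unsigned long) x & 0xffUL, since the mask 0xff leaves the sign
   bit of x clear.  */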
7928 if (TREE_CODE (type) == INTEGER_TYPE
7929 && TREE_CODE (op0) == BIT_AND_EXPR
7930 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7932 tree and_expr = op0;
7933 tree and0 = TREE_OPERAND (and_expr, 0);
7934 tree and1 = TREE_OPERAND (and_expr, 1);
7937 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7938 || (TYPE_PRECISION (type)
7939 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7941 else if (TYPE_PRECISION (TREE_TYPE (and1))
7942 <= HOST_BITS_PER_WIDE_INT
7943 && host_integerp (and1, 1))
7945 unsigned HOST_WIDE_INT cst;
7947 cst = tree_low_cst (and1, 1);
7948 cst &= (HOST_WIDE_INT) -1
7949 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7950 change = (cst == 0);
7951 #ifdef LOAD_EXTEND_OP
7953 && !flag_syntax_only
7954 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7957 tree uns = unsigned_type_for (TREE_TYPE (and0));
7958 and0 = fold_convert_loc (loc, uns, and0);
7959 and1 = fold_convert_loc (loc, uns, and1);
7965 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7966 TREE_INT_CST_HIGH (and1), 0,
7967 TREE_OVERFLOW (and1));
7968 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7969 fold_convert_loc (loc, type, and0), tem);
7973 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7974 when one of the new casts will fold away. Conservatively we assume
7975 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7976 if (POINTER_TYPE_P (type)
7977 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7978 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7979 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7980 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7982 tree arg00 = TREE_OPERAND (arg0, 0);
7983 tree arg01 = TREE_OPERAND (arg0, 1);
7985 return fold_build2_loc (loc,
7986 TREE_CODE (arg0), type,
7987 fold_convert_loc (loc, type, arg00),
7988 fold_convert_loc (loc, sizetype, arg01));
7991 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7992 of the same precision, and X is an integer type not narrower than
7993 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7994 if (INTEGRAL_TYPE_P (type)
7995 && TREE_CODE (op0) == BIT_NOT_EXPR
7996 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7997 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7998 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8000 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8001 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8002 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8003 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8004 fold_convert_loc (loc, type, tem));
8007 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8008 type of X and Y (integer types only). */
8009 if (INTEGRAL_TYPE_P (type)
8010 && TREE_CODE (op0) == MULT_EXPR
8011 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8012 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8014 /* Be careful not to introduce new overflows. */
8016 if (TYPE_OVERFLOW_WRAPS (type))
8019 mult_type = unsigned_type_for (type);
8021 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8023 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8024 fold_convert_loc (loc, mult_type,
8025 TREE_OPERAND (op0, 0)),
8026 fold_convert_loc (loc, mult_type,
8027 TREE_OPERAND (op0, 1)));
8028 return fold_convert_loc (loc, type, tem);
8032 tem = fold_convert_const (code, type, op0);
8033 return tem ? tem : NULL_TREE;
8035 case ADDR_SPACE_CONVERT_EXPR:
8036 if (integer_zerop (arg0))
8037 return fold_convert_const (code, type, arg0);
8040 case FIXED_CONVERT_EXPR:
8041 tem = fold_convert_const (code, type, arg0);
8042 return tem ? tem : NULL_TREE;
8044 case VIEW_CONVERT_EXPR:
8045 if (TREE_TYPE (op0) == type)
8047 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8048 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8049 type, TREE_OPERAND (op0, 0));
8051 /* For integral conversions with the same precision or pointer
8052 conversions use a NOP_EXPR instead. */
8053 if ((INTEGRAL_TYPE_P (type)
8054 || POINTER_TYPE_P (type))
8055 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8056 || POINTER_TYPE_P (TREE_TYPE (op0)))
8057 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8058 return fold_convert_loc (loc, type, op0);
8060 /* Strip inner integral conversions that do not change the precision. */
8061 if (CONVERT_EXPR_P (op0)
8062 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8063 || POINTER_TYPE_P (TREE_TYPE (op0)))
8064 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8065 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8066 && (TYPE_PRECISION (TREE_TYPE (op0))
8067 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8068 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8069 type, TREE_OPERAND (op0, 0));
8071 return fold_view_convert_expr (type, op0);
8074 tem = fold_negate_expr (loc, arg0);
8076 return fold_convert_loc (loc, type, tem);
8080 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8081 return fold_abs_const (arg0, type);
8082 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8083 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8084 /* Convert fabs((double)float) into (double)fabsf(float). */
8085 else if (TREE_CODE (arg0) == NOP_EXPR
8086 && TREE_CODE (type) == REAL_TYPE)
8088 tree targ0 = strip_float_extensions (arg0);
8090 return fold_convert_loc (loc, type,
8091 fold_build1_loc (loc, ABS_EXPR,
8095 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8096 else if (TREE_CODE (arg0) == ABS_EXPR)
8098 else if (tree_expr_nonnegative_p (arg0))
8101 /* Strip sign ops from argument. */
8102 if (TREE_CODE (type) == REAL_TYPE)
8104 tem = fold_strip_sign_ops (arg0);
8106 return fold_build1_loc (loc, ABS_EXPR, type,
8107 fold_convert_loc (loc, type, tem));
8112 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8113 return fold_convert_loc (loc, type, arg0);
8114 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8116 tree itype = TREE_TYPE (type);
8117 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8118 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8119 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8120 negate_expr (ipart));
8122 if (TREE_CODE (arg0) == COMPLEX_CST)
8124 tree itype = TREE_TYPE (type);
8125 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8126 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8127 return build_complex (type, rpart, negate_expr (ipart));
8129 if (TREE_CODE (arg0) == CONJ_EXPR)
8130 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8134 if (TREE_CODE (arg0) == INTEGER_CST)
8135 return fold_not_const (arg0, type);
8136 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8137 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8138 /* Convert ~ (-A) to A - 1. */
8139 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8140 return fold_build2_loc (loc, MINUS_EXPR, type,
8141 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8142 build_int_cst (type, 1));
8143 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8144 else if (INTEGRAL_TYPE_P (type)
8145 && ((TREE_CODE (arg0) == MINUS_EXPR
8146 && integer_onep (TREE_OPERAND (arg0, 1)))
8147 || (TREE_CODE (arg0) == PLUS_EXPR
8148 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8149 return fold_build1_loc (loc, NEGATE_EXPR, type,
8150 fold_convert_loc (loc, type,
8151 TREE_OPERAND (arg0, 0)));
8152 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8153 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8154 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8155 fold_convert_loc (loc, type,
8156 TREE_OPERAND (arg0, 0)))))
8157 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8158 fold_convert_loc (loc, type,
8159 TREE_OPERAND (arg0, 1)));
8160 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8161 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8162 fold_convert_loc (loc, type,
8163 TREE_OPERAND (arg0, 1)))))
8164 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8165 fold_convert_loc (loc, type,
8166 TREE_OPERAND (arg0, 0)), tem);
8167 /* Perform BIT_NOT_EXPR on each element individually. */
8168 else if (TREE_CODE (arg0) == VECTOR_CST)
8170 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8171 int count = TYPE_VECTOR_SUBPARTS (type), i;
8173 for (i = 0; i < count; i++)
8177 elem = TREE_VALUE (elements);
8178 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8179 if (elem == NULL_TREE)
8181 elements = TREE_CHAIN (elements);
8184 elem = build_int_cst (TREE_TYPE (type), -1);
8185 list = tree_cons (NULL_TREE, elem, list);
8188 return build_vector (type, nreverse (list));
8193 case TRUTH_NOT_EXPR:
8194 /* The argument to invert_truthvalue must have Boolean type. */
8195 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8196 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8198 /* Note that the operand of this must be an int
8199 and its values must be 0 or 1.
8200 ("true" is a fixed value perhaps depending on the language,
8201 but we don't handle values other than 1 correctly yet.) */
8202 tem = fold_truth_not_expr (loc, arg0);
8205 return fold_convert_loc (loc, type, tem);
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, integer_zero_node);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
				     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;

    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
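/* Editorial note (added for exposition): the four folds above are the
   absorption laws.  For a == 3, b == 7:
     MIN (MAX (3, 7), 7) == MIN (7, 7) == 7 == b
   and symmetrically for the MAX-of-MIN forms obtained by exchanging the
   roles of CODE and COMPL_CODE.  */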
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its types range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0),
			 TREE_OPERAND (arg0, 0), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
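/* Editorial note (added for exposition): two worked instances of the
   canonicalization above.  For a sole constant, "5 <= arg1" becomes
   "4 < arg1" (unconditionally valid, then swapped to put the constant
   last).  For a signed X, "X - 5 < arg1" becomes "X - 4 <= arg1",
   which is only valid when signed overflow is undefined, hence
   *STRICT_OVERFLOW_P is set in that branch.  */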
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  unsigned HOST_WIDE_INT offset_low, total_low;
  HOST_WIDE_INT size, offset_high, total_high;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    {
      offset_low = 0;
      offset_high = 0;
    }
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    {
      offset_low = TREE_INT_CST_LOW (offset);
      offset_high = TREE_INT_CST_HIGH (offset);
    }

  if (add_double_with_sign (offset_low, offset_high,
			    bitpos / BITS_PER_UNIT, 0,
			    &total_low, &total_high,
			    true))
    return true;

  if (total_high != 0)
    return true;

  size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total_low > (unsigned HOST_WIDE_INT) size;
}
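/* Editorial note (added for exposition): for the common "&p->x" case,
   BASE is "p", OFFSET is NULL_TREE and BITPOS is the bit offset of the
   field "x", so the total ends up as a small byte count no larger than
   sizeof (*p) and the function returns false: such an address cannot
   wrap, and no wraparound warning should be issued for it.  */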
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
			     TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_false_node, variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_true_node, variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  fold_overflow_warning ("assuming signed overflow does not occur "
				 "when changing X +- C1 cmp C2 to "
				 "X cmp C1 +- C2",
				 WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, lhs);
	}
    }
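  /* Editorial note (added for exposition): with 32-bit signed int,
     "x + 10 < 20" folds via 20 - 10 to "x < 10".  When the revised
     constant overflows the result is known outright: "x + 10 < INT_MIN + 5"
     would require x < INT_MIN - 5, which is impossible when signed
     overflow is undefined, so the LT/LE/EQ cases fold to false and the
     NE/GE/GT cases to true.  */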
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  offset0 = TREE_OPERAND (arg0, 1);
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  offset1 = TREE_OPERAND (arg1, 1);
	}

      /* A local variable can never be pointed to by
	 the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
	   && indirect_base0
	   && TREE_CODE (base0) == VAR_DECL
	   && auto_var_in_fn_p (base0, current_function_decl)
	   && !indirect_base1
	   && TREE_CODE (base1) == SSA_NAME
	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
	   && SSA_NAME_IS_DEFAULT_DEF (base1))
	  || (TREE_CODE (arg1) == ADDR_EXPR
	      && indirect_base1
	      && TREE_CODE (base1) == VAR_DECL
	      && auto_var_in_fn_p (base1, current_function_decl)
	      && !indirect_base0
	      && TREE_CODE (base0) == SSA_NAME
	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
	      && SSA_NAME_IS_DEFAULT_DEF (base0)))
	{
	  if (code == NE_EXPR)
	    return constant_boolean_node (1, type);
	  else if (code == EQ_EXPR)
	    return constant_boolean_node (0, type);
	}
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
	       && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (code == EQ_EXPR
		  || code == NE_EXPR
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed size type here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && ((code == EQ_EXPR || code == NE_EXPR)
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      /* By converting to signed size type we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 size type.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
					  arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
					  arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc,
						   TREE_CODE (arg1), TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }
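  /* Editorial note (added for exposition): with undefined signed overflow,
     "x * 4 < 0" has the sign of x and folds to "x < 0"; a negative factor
     flips the sense, e.g. "x * -4 < 0" becomes "x > 0".  */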
  tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
  if (tem)
    return tem;

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, newtype, targ0),
				fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				    TREE_OPERAND (arg0, 0),
				    build_real (TREE_TYPE (arg1),
						real_value_negate (&cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }

  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, minval),
				   arg1);
	  tree equal_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, maxval),
				   arg1);
	  tree low_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, minval,
					       cval2, maxval),
				   arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type, arg1)));
    }

  return NULL_TREE;
}
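/* Editorial note (added for exposition) on the final BIT_NOT folds in
   fold_comparison above: since ~X == -X - 1 is strictly decreasing,
   "~x < 5" is equivalent to "x > ~5", i.e. "x > -6" -- hence the
   swapped comparison code and the BIT_NOT applied to the constant.  */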
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  fold_convert_loc (loc, itype, integer_zero_node));
}
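/* Editorial note (added for exposition): for z = a + bi,
   z * conj(z) = (a + bi)(a - bi) = a*a + b*b with zero imaginary part;
   e.g. (3 + 4i)(3 - 4i) = 25.  That is exactly the
   COMPLEX_EXPR <a*a + b*b, 0> built above.  */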
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      if (handled_component_p (expr))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;

	  expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  *residue = bitpos / BITS_PER_UNIT;
	  if (offset)
	    {
	      if (TREE_CODE (offset) == INTEGER_CST)
		*residue += TREE_INT_CST_LOW (offset);
	      else
		/* We don't handle more complicated offset expressions.  */
		return 1;
	    }
	}

      if (DECL_P (expr)
	  && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
	return DECL_ALIGN_UNIT (expr);
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
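/* Editorial note (added for exposition): an illustrative reading of the
   contract above.  For EXPR == &a[1], with "a" a 16-byte-aligned array
   of 4-byte elements, the function returns M == 16 with *RESIDUE == 4:
   every possible value P of the expression satisfies P == 4 (mod 16),
   so the low log2(16) bits of P are known even though P itself is not.  */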
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
		 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     cases, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
	{
	  /* Make sure type and arg0 have the same saturating flag.  */
	  gcc_assert (TYPE_SATURATING (type)
		      == TYPE_SATURATING (TREE_TYPE (arg0)));
	  tem = const_binop (code, arg0, arg1, 0);
	}
      else if (kind == tcc_comparison)
	tem = fold_relational_const (code, type, arg0, arg1);
      else
	tem = NULL_TREE;

      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert_loc (loc, type, tem);
	  return tem;
	}
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     where one of the operands is a comparison and the other is a
     comparison, a BIT_AND_EXPR with the constant 1, or a truth value.
     In that case, the code below would make the expression more complex.
     Change it to a TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar
     NE_EXPR to TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a
     TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			     : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			     : TRUTH_XOR_EXPR,
			     boolean_type_node,
			     fold_convert_loc (loc, boolean_type_node, arg0),
			     fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	{
	  tem = fold_build2_loc (loc, code, type,
				 fold_convert_loc (loc, TREE_TYPE (op0),
						   TREE_OPERAND (arg0, 1)), op1);
	  tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
	  goto fold_binary_exit;
	}
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, code, type, op0,
				 fold_convert_loc (loc, TREE_TYPE (op1),
						   TREE_OPERAND (arg1, 1)));
	  tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
	  goto fold_binary_exit;
	}

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert_loc (loc, type,
				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
						  fold_convert_loc (loc, sizetype,
								    arg1),
						  fold_convert_loc (loc, sizetype,
								    arg0)));

      /* index +p PTR -> PTR +p index */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
				fold_convert_loc (loc, type, arg1),
				fold_convert_loc (loc, sizetype, arg0));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  tree inner;
	  tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
				   arg01, fold_convert_loc (loc, sizetype, arg1));
	  return fold_convert_loc (loc, type,
				   fold_build2_loc (loc, POINTER_PLUS_EXPR,
						    TREE_TYPE (arg00),
						    arg00, inner));
	}

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
				fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
	 of the array.  The loop optimizer sometimes produces this type
	 of expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tem = try_move_mult_to_index (loc, arg0,
					fold_convert_loc (loc, sizetype, arg1));
	  if (tem)
	    return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;
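      /* Editorial note (added for exposition): the (PTR +p B) +p A fold
	 above reassociates at compile time, e.g. (p +p 4) +p 8 becomes
	 p +p 12 once the inner PLUS_EXPR on the two sizetype offsets is
	 folded.  */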
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type, arg1),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type))
	{
	  /* Convert ~A + 1 to -A.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && integer_onep (arg1))
	    return fold_build1_loc (loc, NEGATE_EXPR, type,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)));

	  /* ~X + X is -1.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg0, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (tem, arg1, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand_loc (loc, type, t1, arg1);
		}
	    }

	  /* X + ~X is -1.  */
	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg1, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (arg0, tem, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand_loc (loc, type, t1, arg0);
		}
	    }

	  /* X + (X / CST) * -CST is X % CST.  */
	  if (TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	      && operand_equal_p (arg0,
				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
	    {
	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
	      tree cst1 = TREE_OPERAND (arg1, 1);
	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
					  cst1, cst0);
	      if (sum && integer_zerop (sum))
		return fold_convert_loc (loc, type,
					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
							  TREE_TYPE (arg0), arg0,
							  cst0));
	    }
	}

      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
	 same or one.  Make sure type is not saturating.
	 fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1), 0)))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }
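	  /* Editorial note (added for exposition): e.g.
	     (x & 0xF0) + (x & 0x0F) -- the masks share no bits, so no
	     carries can occur and the sum equals
	     (x & 0xF0) | (x & 0x0F), which may fold further.  */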
	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (((TREE_CODE (arg0) == PLUS_EXPR
		|| TREE_CODE (arg0) == MINUS_EXPR)
	       && TREE_CODE (arg1) == MULT_EXPR)
	      || ((TREE_CODE (arg1) == PLUS_EXPR
		   || TREE_CODE (arg1) == MINUS_EXPR)
		  && TREE_CODE (arg0) == MULT_EXPR))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2_loc (loc, pcode, type,
					fold_build2_loc (loc, PLUS_EXPR, type,
							 fold_convert_loc (loc, type,
									   parg0),
							 fold_convert_loc (loc, type,
									   marg)),
					fold_convert_loc (loc, type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return
		  fold_build2_loc (loc, PLUS_EXPR, type,
				   fold_convert_loc (loc, type, parg0),
				   fold_build2_loc (loc, pcode, type,
						    fold_convert_loc (loc, type, marg),
						    fold_convert_loc (loc, type,
								      parg1)));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold_build2_loc (loc, MINUS_EXPR, type,
					fold_convert_loc (loc, type, arg0),
					fold_convert_loc (loc, type, tem));
	    }

	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
	     to __complex__ ( x, y ).  This is not the same for SNaNs or
	     if signed zeros are involved.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	      bool arg0rz = false, arg0iz = false;
	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
		  || (arg0i && (arg0iz = real_zerop (arg0i))))
		{
		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
		  if (arg0rz && arg1i && real_zerop (arg1i))
		    {
		      tree rp = arg1r ? arg1r
				  : build1 (REALPART_EXPR, rtype, arg1);
		      tree ip = arg0i ? arg0i
				  : build1 (IMAGPART_EXPR, rtype, arg0);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		  else if (arg0iz && arg1r && real_zerop (arg1r))
		    {
		      tree rp = arg0r ? arg0r
				  : build1 (REALPART_EXPR, rtype, arg0);
		      tree ip = arg1i ? arg1i
				  : build1 (IMAGPART_EXPR, rtype, arg1);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		}
	    }

	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	    return tem;

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0,
				    build_real (type, dconst2));

	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}
     bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	tree rtype;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
		TYPE_UNSIGNED (rtype))
	    /* Only create rotates in complete modes.  Other cases are not
	       expanded properly.  */
	    && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      {
		tem = build2 (LROTATE_EXPR,
			      TREE_TYPE (TREE_OPERAND (arg0, 0)),
			      TREE_OPERAND (arg0, 0),
			      code0 == LSHIFT_EXPR
			      ? tree01 : tree11);
		SET_EXPR_LOCATION (tem, loc);
		return fold_convert_loc (loc, type, tem);
	      }
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return
		    fold_convert_loc (loc, type,
				      build2 ((code0 == LSHIFT_EXPR
					       ? LROTATE_EXPR
					       : RROTATE_EXPR),
					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
					      TREE_OPERAND (arg0, 0), tree01));
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return
		    fold_convert_loc
		      (loc, type,
		       build2 ((code0 != LSHIFT_EXPR
				? LROTATE_EXPR
				: RROTATE_EXPR),
			       TREE_TYPE (TREE_OPERAND (arg0, 0)),
			       TREE_OPERAND (arg0, 0), tree11));
	      }
	  }
      }
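      /* Editorial note (added for exposition): a concrete instance of the
	 patterns above.  For a 32-bit unsigned A, (A << 8) + (A >> 24) has
	 C1 + C2 == 32 and becomes an LROTATE_EXPR of A by 8; likewise
	 (A << B) + (A >> (32 - B)) is a rotate left by B bits, which is
	 why the MINUS_EXPR forms are matched against TYPE_PRECISION.  */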
     associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -fassociative-math.
	 And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
	  && !TYPE_SATURATING (type))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;
	  bool ok = true;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	  if (code == MINUS_EXPR)
	    code = PLUS_EXPR;

	  /* With undefined overflow we can only associate constants with one
	     variable, and constants whose association doesn't overflow.  */
	  if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
	    {
	      if (var0 && var1)
		{
		  tree tmp0 = var0;
		  tree tmp1 = var1;

		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  /* The only case we can still associate with two variables
		     is if they are the same, modulo negation.  */
		  if (!operand_equal_p (tmp0, tmp1, 0))
		    ok = false;
		}

	      if (ok && lit0 && lit1)
		{
		  tree tmp0 = fold_convert (type, lit0);
		  tree tmp1 = fold_convert (type, lit1);

		  if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
		      && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
		    ok = false;
		}
	    }

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (ok
	      && (2 < ((var0 != 0) + (var1 != 0)
		       + (con0 != 0) + (con1 != 0)
		       + (lit0 != 0) + (lit1 != 0)
		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
	    {
	      var0 = associate_trees (loc, var0, var1, code, type);
	      con0 = associate_trees (loc, con0, con1, code, type);
	      lit0 = associate_trees (loc, lit0, lit1, code, type);
	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
					    code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal
		 is greater than the positive part.  Otherwise, the
		 multiplicative folding code (i.e. extract_muldiv) may be
		 fooled in case unsigned constants are subtracted, like in
		 the following example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (loc, lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return
		      fold_convert_loc (loc, type,
					associate_trees (loc, var0, minus_lit0,
							 MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (loc, con0, minus_lit0,
					      MINUS_EXPR, type);
		      return
			fold_convert_loc (loc, type,
					  associate_trees (loc, var0, con0,
							   PLUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (loc, con0, lit0, code, type);
	      return
		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
							      code, type));
	    }
	}

      return NULL_TREE;
10079 /* Pointer simplifications for subtraction, simple reassociations. */
10080 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10082 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10083 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10084 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10086 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10087 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10088 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10089 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10090 return fold_build2_loc (loc, PLUS_EXPR, type,
10091 fold_build2_loc (loc, MINUS_EXPR, type,
10093 fold_build2_loc (loc, MINUS_EXPR, type,
10096 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10097 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10099 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10100 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10101 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10102 fold_convert_loc (loc, type, arg1));
10104 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10107 /* A - (-B) -> A + B */
10108 if (TREE_CODE (arg1) == NEGATE_EXPR)
10109 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10110 fold_convert_loc (loc, type,
10111 TREE_OPERAND (arg1, 0)));
10112 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10113 if (TREE_CODE (arg0) == NEGATE_EXPR
10114 && (FLOAT_TYPE_P (type)
10115 || INTEGRAL_TYPE_P (type))
10116 && negate_expr_p (arg1)
10117 && reorder_operands_p (arg0, arg1))
10118 return fold_build2_loc (loc, MINUS_EXPR, type,
10119 fold_convert_loc (loc, type,
10120 negate_expr (arg1)),
10121 fold_convert_loc (loc, type,
10122 TREE_OPERAND (arg0, 0)));
10123 /* Convert -A - 1 to ~A. */
10124 if (INTEGRAL_TYPE_P (type)
10125 && TREE_CODE (arg0) == NEGATE_EXPR
10126 && integer_onep (arg1)
10127 && !TYPE_OVERFLOW_TRAPS (type))
10128 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10129 fold_convert_loc (loc, type,
10130 TREE_OPERAND (arg0, 0)));
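/* Illustrative example: in two's complement -a == ~a + 1, so
   -a - 1 == ~a; e.g. for a == 5, -5 - 1 == -6 and ~5 == -6.  The
   TYPE_OVERFLOW_TRAPS guard matters because BIT_NOT never traps,
   while negating INT_MIN can trap under -ftrapv.  */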
10132 /* Convert -1 - A to ~A. */
10133 if (INTEGRAL_TYPE_P (type)
10134 && integer_all_onesp (arg0))
10135 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10138 /* X - (X / CST) * CST is X % CST. */
10139 if (INTEGRAL_TYPE_P (type)
10140 && TREE_CODE (arg1) == MULT_EXPR
10141 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10142 && operand_equal_p (arg0,
10143 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10144 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10145 TREE_OPERAND (arg1, 1), 0))
10147 fold_convert_loc (loc, type,
10148 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10149 arg0, TREE_OPERAND (arg1, 1)));
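/* Worked example (illustrative): with X == 17 and CST == 5,
   X - (X/5)*5 == 17 - 3*5 == 2 == 17 % 5.  The identity also holds
   for negative X under C's truncating division:
   -17 - (-17/5)*5 == -17 + 15 == -2 == -17 % 5.  */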
10151 if (! FLOAT_TYPE_P (type))
10153 if (integer_zerop (arg0))
10154 return negate_expr (fold_convert_loc (loc, type, arg1));
10155 if (integer_zerop (arg1))
10156 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10158 /* Fold A - (A & B) into ~B & A. */
10159 if (!TREE_SIDE_EFFECTS (arg0)
10160 && TREE_CODE (arg1) == BIT_AND_EXPR)
10162 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10164 tree arg10 = fold_convert_loc (loc, type,
10165 TREE_OPERAND (arg1, 0));
10166 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10167 fold_build1_loc (loc, BIT_NOT_EXPR,
10169 fold_convert_loc (loc, type, arg0));
10171 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10173 tree arg11 = fold_convert_loc (loc,
10174 type, TREE_OPERAND (arg1, 1));
10175 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10176 fold_build1_loc (loc, BIT_NOT_EXPR,
10178 fold_convert_loc (loc, type, arg0));
10182 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10183 any power of 2 minus 1. */
10184 if (TREE_CODE (arg0) == BIT_AND_EXPR
10185 && TREE_CODE (arg1) == BIT_AND_EXPR
10186 && operand_equal_p (TREE_OPERAND (arg0, 0),
10187 TREE_OPERAND (arg1, 0), 0))
10189 tree mask0 = TREE_OPERAND (arg0, 1);
10190 tree mask1 = TREE_OPERAND (arg1, 1);
10191 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10193 if (operand_equal_p (tem, mask1, 0))
10195 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10196 TREE_OPERAND (arg0, 0), mask1);
10197 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
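/* Illustrative bit-level check with B == 7 (a power of 2 minus 1)
   and A == 0b101101 (45):
     (A & ~7) - (A & 7) == 40 - 5 == 35
     (A ^ 7) - 7        == 42 - 7 == 35
   The rewrite relies on the two masks being exact complements, which
   is what the operand_equal_p (tem, mask1) test above verifies.  */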
10202 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10203 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10204 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10206 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10207 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10208 (-ARG1 + ARG0) reduces to -ARG1. */
10209 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10210 return negate_expr (fold_convert_loc (loc, type, arg1));
10212 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10213 __complex__ ( x, -y ). This is not the same for SNaNs or if
10214 signed zeros are involved. */
10215 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10216 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10217 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10219 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10220 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10221 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10222 bool arg0rz = false, arg0iz = false;
10223 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10224 || (arg0i && (arg0iz = real_zerop (arg0i))))
10226 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10227 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10228 if (arg0rz && arg1i && real_zerop (arg1i))
10230 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10232 : build1 (REALPART_EXPR, rtype, arg1));
10233 tree ip = arg0i ? arg0i
10234 : build1 (IMAGPART_EXPR, rtype, arg0);
10235 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10237 else if (arg0iz && arg1r && real_zerop (arg1r))
10239 tree rp = arg0r ? arg0r
10240 : build1 (REALPART_EXPR, rtype, arg0);
10241 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10243 : build1 (IMAGPART_EXPR, rtype, arg1));
10244 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10249 /* Fold &x - &x. This can happen from &x.foo - &x.
10250 This is unsafe for certain floats even in non-IEEE formats.
10251 In IEEE, it is unsafe because it does wrong for NaNs.
10252 Also note that operand_equal_p is always false if an operand
10253 is volatile. */
10255 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10256 && operand_equal_p (arg0, arg1, 0))
10257 return fold_convert_loc (loc, type, integer_zero_node);
10259 /* A - B -> A + (-B) if B is easily negatable. */
10260 if (negate_expr_p (arg1)
10261 && ((FLOAT_TYPE_P (type)
10262 /* Avoid this transformation if B is a positive REAL_CST. */
10263 && (TREE_CODE (arg1) != REAL_CST
10264 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10265 || INTEGRAL_TYPE_P (type)))
10266 return fold_build2_loc (loc, PLUS_EXPR, type,
10267 fold_convert_loc (loc, type, arg0),
10268 fold_convert_loc (loc, type,
10269 negate_expr (arg1)));
10271 /* Try folding difference of addresses. */
10273 HOST_WIDE_INT diff;
10275 if ((TREE_CODE (arg0) == ADDR_EXPR
10276 || TREE_CODE (arg1) == ADDR_EXPR)
10277 && ptr_difference_const (arg0, arg1, &diff))
10278 return build_int_cst_type (type, diff);
10281 /* Fold &a[i] - &a[j] to i-j. */
10282 if (TREE_CODE (arg0) == ADDR_EXPR
10283 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10284 && TREE_CODE (arg1) == ADDR_EXPR
10285 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10287 tree aref0 = TREE_OPERAND (arg0, 0);
10288 tree aref1 = TREE_OPERAND (arg1, 0);
10289 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10290 TREE_OPERAND (aref1, 0), 0))
10292 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10293 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10294 tree esz = array_ref_element_size (aref0);
10295 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10296 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10297 fold_convert_loc (loc, type, esz));
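/* Annotation (a sketch, assuming the usual byte-offset representation
   of pointer subtraction at this point): for int a[16], the difference
   &a[i] - &a[j] is built here as (i - j) * sizeof (int) in bytes; the
   front end's subsequent division by the element size recovers the
   plain i - j that the comment above refers to.  */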
10302 if (FLOAT_TYPE_P (type)
10303 && flag_unsafe_math_optimizations
10304 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10305 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10306 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10309 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10310 same or one. Make sure type is not saturating.
10311 fold_plusminus_mult_expr will re-associate. */
10312 if ((TREE_CODE (arg0) == MULT_EXPR
10313 || TREE_CODE (arg1) == MULT_EXPR)
10314 && !TYPE_SATURATING (type)
10315 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10317 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10325 /* (-A) * (-B) -> A * B */
10326 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10327 return fold_build2_loc (loc, MULT_EXPR, type,
10328 fold_convert_loc (loc, type,
10329 TREE_OPERAND (arg0, 0)),
10330 fold_convert_loc (loc, type,
10331 negate_expr (arg1)));
10332 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10333 return fold_build2_loc (loc, MULT_EXPR, type,
10334 fold_convert_loc (loc, type,
10335 negate_expr (arg0)),
10336 fold_convert_loc (loc, type,
10337 TREE_OPERAND (arg1, 0)));
10339 if (! FLOAT_TYPE_P (type))
10341 if (integer_zerop (arg1))
10342 return omit_one_operand_loc (loc, type, arg1, arg0);
10343 if (integer_onep (arg1))
10344 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10345 /* Transform x * -1 into -x. Make sure to do the negation
10346 on the original operand with conversions not stripped
10347 because we can only strip non-sign-changing conversions. */
10348 if (integer_all_onesp (arg1))
10349 return fold_convert_loc (loc, type, negate_expr (op0));
10350 /* Transform x * -C into -x * C if x is easily negatable. */
10351 if (TREE_CODE (arg1) == INTEGER_CST
10352 && tree_int_cst_sgn (arg1) == -1
10353 && negate_expr_p (arg0)
10354 && (tem = negate_expr (arg1)) != arg1
10355 && !TREE_OVERFLOW (tem))
10356 return fold_build2_loc (loc, MULT_EXPR, type,
10357 fold_convert_loc (loc, type,
10358 negate_expr (arg0)),
10361 /* (a * (1 << b)) is (a << b) */
10362 if (TREE_CODE (arg1) == LSHIFT_EXPR
10363 && integer_onep (TREE_OPERAND (arg1, 0)))
10364 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10365 TREE_OPERAND (arg1, 1));
10366 if (TREE_CODE (arg0) == LSHIFT_EXPR
10367 && integer_onep (TREE_OPERAND (arg0, 0)))
10368 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10369 TREE_OPERAND (arg0, 1));
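/* Illustrative example: with a == 3 and b == 4,
   a * (1 << b) == 3 * 16 == 48 == 3 << 4, so a multiply by a variable
   power of two becomes a single shift.  */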
10371 /* (A + A) * C -> A * 2 * C */
10372 if (TREE_CODE (arg0) == PLUS_EXPR
10373 && TREE_CODE (arg1) == INTEGER_CST
10374 && operand_equal_p (TREE_OPERAND (arg0, 0),
10375 TREE_OPERAND (arg0, 1), 0))
10376 return fold_build2_loc (loc, MULT_EXPR, type,
10377 omit_one_operand_loc (loc, type,
10378 TREE_OPERAND (arg0, 0),
10379 TREE_OPERAND (arg0, 1)),
10380 fold_build2_loc (loc, MULT_EXPR, type,
10381 build_int_cst (type, 2), arg1));
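/* Illustrative example: (a + a) * c becomes a * (2 * c); with c == 5
   the constants fold to a * 10, turning an add feeding a multiply
   into one multiplication.  */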
10383 strict_overflow_p = false;
10384 if (TREE_CODE (arg1) == INTEGER_CST
10385 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10386 &strict_overflow_p)))
10388 if (strict_overflow_p)
10389 fold_overflow_warning (("assuming signed overflow does not "
10390 "occur when simplifying "
10392 WARN_STRICT_OVERFLOW_MISC);
10393 return fold_convert_loc (loc, type, tem);
10396 /* Optimize z * conj(z) for integer complex numbers. */
10397 if (TREE_CODE (arg0) == CONJ_EXPR
10398 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10399 return fold_mult_zconjz (loc, type, arg1);
10400 if (TREE_CODE (arg1) == CONJ_EXPR
10401 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10402 return fold_mult_zconjz (loc, type, arg0);
10406 /* Maybe fold x * 0 to 0. The expressions aren't the same
10407 when x is NaN, since x * 0 is also NaN. Nor are they the
10408 same in modes with signed zeros, since multiplying a
10409 negative value by 0 gives -0, not +0. */
10410 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10411 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10412 && real_zerop (arg1))
10413 return omit_one_operand_loc (loc, type, arg1, arg0);
10414 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10415 Likewise for complex arithmetic with signed zeros. */
10416 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10417 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10418 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10419 && real_onep (arg1))
10420 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10422 /* Transform x * -1.0 into -x. */
10423 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10424 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10425 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10426 && real_minus_onep (arg1))
10427 return fold_convert_loc (loc, type, negate_expr (arg0));
10429 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10430 the result for floating point types due to rounding, so it is applied
10431 only if -fassociative-math was specified. */
10432 if (flag_associative_math
10433 && TREE_CODE (arg0) == RDIV_EXPR
10434 && TREE_CODE (arg1) == REAL_CST
10435 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10437 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10440 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10441 TREE_OPERAND (arg0, 1));
10444 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10445 if (operand_equal_p (arg0, arg1, 0))
10447 tree tem = fold_strip_sign_ops (arg0);
10448 if (tem != NULL_TREE)
10450 tem = fold_convert_loc (loc, type, tem);
10451 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10455 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10456 This is not the same for NaNs or if signed zeros are
10457 involved. */
10458 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10459 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10460 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10461 && TREE_CODE (arg1) == COMPLEX_CST
10462 && real_zerop (TREE_REALPART (arg1)))
10464 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10465 if (real_onep (TREE_IMAGPART (arg1)))
10467 fold_build2_loc (loc, COMPLEX_EXPR, type,
10468 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10470 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10471 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10473 fold_build2_loc (loc, COMPLEX_EXPR, type,
10474 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10475 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10479 /* Optimize z * conj(z) for floating point complex numbers.
10480 Guarded by flag_unsafe_math_optimizations as non-finite
10481 imaginary components don't produce scalar results. */
10482 if (flag_unsafe_math_optimizations
10483 && TREE_CODE (arg0) == CONJ_EXPR
10484 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10485 return fold_mult_zconjz (loc, type, arg1);
10486 if (flag_unsafe_math_optimizations
10487 && TREE_CODE (arg1) == CONJ_EXPR
10488 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10489 return fold_mult_zconjz (loc, type, arg0);
10491 if (flag_unsafe_math_optimizations)
10493 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10494 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10496 /* Optimizations of root(...)*root(...). */
10497 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10500 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10501 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10503 /* Optimize sqrt(x)*sqrt(x) as x. */
10504 if (BUILTIN_SQRT_P (fcode0)
10505 && operand_equal_p (arg00, arg10, 0)
10506 && ! HONOR_SNANS (TYPE_MODE (type)))
10509 /* Optimize root(x)*root(y) as root(x*y). */
10510 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10511 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10512 return build_call_expr_loc (loc, rootfn, 1, arg);
10515 /* Optimize expN(x)*expN(y) as expN(x+y). */
10516 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10518 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10519 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10520 CALL_EXPR_ARG (arg0, 0),
10521 CALL_EXPR_ARG (arg1, 0));
10522 return build_call_expr_loc (loc, expfn, 1, arg);
10525 /* Optimizations of pow(...)*pow(...). */
10526 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10527 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10528 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10530 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10531 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10532 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10533 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10535 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10536 if (operand_equal_p (arg01, arg11, 0))
10538 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10539 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10541 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10544 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10545 if (operand_equal_p (arg00, arg10, 0))
10547 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10548 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10550 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10554 /* Optimize tan(x)*cos(x) as sin(x). */
10555 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10556 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10557 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10558 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10559 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10560 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10561 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10562 CALL_EXPR_ARG (arg1, 0), 0))
10564 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10566 if (sinfn != NULL_TREE)
10567 return build_call_expr_loc (loc, sinfn, 1,
10568 CALL_EXPR_ARG (arg0, 0));
10571 /* Optimize x*pow(x,c) as pow(x,c+1). */
10572 if (fcode1 == BUILT_IN_POW
10573 || fcode1 == BUILT_IN_POWF
10574 || fcode1 == BUILT_IN_POWL)
10576 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10577 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10578 if (TREE_CODE (arg11) == REAL_CST
10579 && !TREE_OVERFLOW (arg11)
10580 && operand_equal_p (arg0, arg10, 0))
10582 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10586 c = TREE_REAL_CST (arg11);
10587 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10588 arg = build_real (type, c);
10589 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10593 /* Optimize pow(x,c)*x as pow(x,c+1). */
10594 if (fcode0 == BUILT_IN_POW
10595 || fcode0 == BUILT_IN_POWF
10596 || fcode0 == BUILT_IN_POWL)
10598 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10599 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10600 if (TREE_CODE (arg01) == REAL_CST
10601 && !TREE_OVERFLOW (arg01)
10602 && operand_equal_p (arg1, arg00, 0))
10604 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10608 c = TREE_REAL_CST (arg01);
10609 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10610 arg = build_real (type, c);
10611 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10615 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10616 if (optimize_function_for_speed_p (cfun)
10617 && operand_equal_p (arg0, arg1, 0))
10619 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10623 tree arg = build_real (type, dconst2);
10624 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10633 if (integer_all_onesp (arg1))
10634 return omit_one_operand_loc (loc, type, arg1, arg0);
10635 if (integer_zerop (arg1))
10636 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10637 if (operand_equal_p (arg0, arg1, 0))
10638 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10640 /* ~X | X is -1. */
10641 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10642 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10644 t1 = fold_convert_loc (loc, type, integer_zero_node);
10645 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10646 return omit_one_operand_loc (loc, type, t1, arg1);
10649 /* X | ~X is -1. */
10650 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10651 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10653 t1 = fold_convert_loc (loc, type, integer_zero_node);
10654 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10655 return omit_one_operand_loc (loc, type, t1, arg0);
10658 /* Canonicalize (X & C1) | C2. */
10659 if (TREE_CODE (arg0) == BIT_AND_EXPR
10660 && TREE_CODE (arg1) == INTEGER_CST
10661 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10663 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10664 int width = TYPE_PRECISION (type), w;
10665 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10666 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10667 hi2 = TREE_INT_CST_HIGH (arg1);
10668 lo2 = TREE_INT_CST_LOW (arg1);
10670 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10671 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10672 return omit_one_operand_loc (loc, type, arg1,
10673 TREE_OPERAND (arg0, 0));
10675 if (width > HOST_BITS_PER_WIDE_INT)
10677 mhi = (unsigned HOST_WIDE_INT) -1
10678 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10684 mlo = (unsigned HOST_WIDE_INT) -1
10685 >> (HOST_BITS_PER_WIDE_INT - width);
10688 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10689 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10690 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10691 TREE_OPERAND (arg0, 0), arg1);
10693 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10694 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10695 mode which allows further optimizations. */
10702 for (w = BITS_PER_UNIT;
10703 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10706 unsigned HOST_WIDE_INT mask
10707 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10708 if (((lo1 | lo2) & mask) == mask
10709 && (lo1 & ~mask) == 0 && hi1 == 0)
10716 if (hi3 != hi1 || lo3 != lo1)
10717 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10718 fold_build2_loc (loc, BIT_AND_EXPR, type,
10719 TREE_OPERAND (arg0, 0),
10720 build_int_cst_wide (type,
10725 /* (X & Y) | Y is (X, Y). */
10726 if (TREE_CODE (arg0) == BIT_AND_EXPR
10727 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10728 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10729 /* (X & Y) | X is (Y, X). */
10730 if (TREE_CODE (arg0) == BIT_AND_EXPR
10731 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10732 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10733 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10734 /* X | (X & Y) is (Y, X). */
10735 if (TREE_CODE (arg1) == BIT_AND_EXPR
10736 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10737 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10738 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10739 /* X | (Y & X) is (Y, X). */
10740 if (TREE_CODE (arg1) == BIT_AND_EXPR
10741 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10742 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10743 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10745 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10746 if (t1 != NULL_TREE)
10749 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10751 This results in more efficient code for machines without a NAND
10752 instruction. Combine will canonicalize to the first form
10753 which will allow use of NAND instructions provided by the
10754 backend if they exist. */
10755 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10756 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10759 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10760 build2 (BIT_AND_EXPR, type,
10761 fold_convert_loc (loc, type,
10762 TREE_OPERAND (arg0, 0)),
10763 fold_convert_loc (loc, type,
10764 TREE_OPERAND (arg1, 0))));
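/* Illustrative example of this De Morgan rewrite, viewing 4-bit
   values: with a == 0b1100 and b == 0b1010,
     ~a | ~b  == 0b0011 | 0b0101 == 0b0111
     ~(a & b) == ~0b1000         == 0b0111
   so the two forms agree while the second needs one NOT fewer.  */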
10767 /* See if this can be simplified into a rotate first. If that
10768 is unsuccessful, continue in the association code. */
10772 if (integer_zerop (arg1))
10773 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10774 if (integer_all_onesp (arg1))
10775 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10776 if (operand_equal_p (arg0, arg1, 0))
10777 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10779 /* ~X ^ X is -1. */
10780 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10781 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10783 t1 = fold_convert_loc (loc, type, integer_zero_node);
10784 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10785 return omit_one_operand_loc (loc, type, t1, arg1);
10788 /* X ^ ~X is -1. */
10789 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10790 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10792 t1 = fold_convert_loc (loc, type, integer_zero_node);
10793 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10794 return omit_one_operand_loc (loc, type, t1, arg0);
10797 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10798 with a constant, and the two constants have no bits in common,
10799 we should treat this as a BIT_IOR_EXPR since this may produce more
10800 simplifications. */
10801 if (TREE_CODE (arg0) == BIT_AND_EXPR
10802 && TREE_CODE (arg1) == BIT_AND_EXPR
10803 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10804 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10805 && integer_zerop (const_binop (BIT_AND_EXPR,
10806 TREE_OPERAND (arg0, 1),
10807 TREE_OPERAND (arg1, 1), 0)))
10809 code = BIT_IOR_EXPR;
10813 /* (X | Y) ^ X -> Y & ~X */
10814 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10815 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10817 tree t2 = TREE_OPERAND (arg0, 1);
10818 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10820 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10821 fold_convert_loc (loc, type, t2),
10822 fold_convert_loc (loc, type, t1));
10826 /* (Y | X) ^ X -> Y & ~X */
10827 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10828 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10830 tree t2 = TREE_OPERAND (arg0, 0);
10831 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10833 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10834 fold_convert_loc (loc, type, t2),
10835 fold_convert_loc (loc, type, t1));
10839 /* X ^ (X | Y) -> Y & ~X */
10840 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10841 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10843 tree t2 = TREE_OPERAND (arg1, 1);
10844 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10846 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10847 fold_convert_loc (loc, type, t2),
10848 fold_convert_loc (loc, type, t1));
10852 /* X ^ (Y | X) -> Y & ~X */
10853 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10854 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10856 tree t2 = TREE_OPERAND (arg1, 0);
10857 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10859 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10860 fold_convert_loc (loc, type, t2),
10861 fold_convert_loc (loc, type, t1));
10865 /* Convert ~X ^ ~Y to X ^ Y. */
10866 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10867 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10868 return fold_build2_loc (loc, code, type,
10869 fold_convert_loc (loc, type,
10870 TREE_OPERAND (arg0, 0)),
10871 fold_convert_loc (loc, type,
10872 TREE_OPERAND (arg1, 0)));
10874 /* Convert ~X ^ C to X ^ ~C. */
10875 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10876 && TREE_CODE (arg1) == INTEGER_CST)
10877 return fold_build2_loc (loc, code, type,
10878 fold_convert_loc (loc, type,
10879 TREE_OPERAND (arg0, 0)),
10880 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10882 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10883 if (TREE_CODE (arg0) == BIT_AND_EXPR
10884 && integer_onep (TREE_OPERAND (arg0, 1))
10885 && integer_onep (arg1))
10886 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10887 build_int_cst (TREE_TYPE (arg0), 0));
10889 /* Fold (X & Y) ^ Y as ~X & Y. */
10890 if (TREE_CODE (arg0) == BIT_AND_EXPR
10891 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10893 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10894 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10895 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10896 fold_convert_loc (loc, type, arg1));
10898 /* Fold (X & Y) ^ X as ~Y & X. */
10899 if (TREE_CODE (arg0) == BIT_AND_EXPR
10900 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10901 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10903 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10904 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10905 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10906 fold_convert_loc (loc, type, arg1));
10908 /* Fold X ^ (X & Y) as X & ~Y. */
10909 if (TREE_CODE (arg1) == BIT_AND_EXPR
10910 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10912 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10913 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10914 fold_convert_loc (loc, type, arg0),
10915 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10917 /* Fold X ^ (Y & X) as ~Y & X. */
10918 if (TREE_CODE (arg1) == BIT_AND_EXPR
10919 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10920 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10922 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10923 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10924 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10925 fold_convert_loc (loc, type, arg0));
10928 /* See if this can be simplified into a rotate first. If that
10929 is unsuccessful, continue in the association code. */
10933 if (integer_all_onesp (arg1))
10934 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10935 if (integer_zerop (arg1))
10936 return omit_one_operand_loc (loc, type, arg1, arg0);
10937 if (operand_equal_p (arg0, arg1, 0))
10938 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10940 /* ~X & X is always zero. */
10941 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10942 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10943 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10945 /* X & ~X is always zero. */
10946 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10947 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10948 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10950 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10951 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10952 && TREE_CODE (arg1) == INTEGER_CST
10953 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10955 tree tmp1 = fold_convert_loc (loc, type, arg1);
10956 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10957 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10958 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10959 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10961 fold_convert_loc (loc, type,
10962 fold_build2_loc (loc, BIT_IOR_EXPR,
10963 type, tmp2, tmp3));
10966 /* (X | Y) & Y is (X, Y). */
10967 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10968 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10969 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10970 /* (X | Y) & X is (Y, X). */
10971 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10972 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10973 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10974 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10975 /* X & (X | Y) is (Y, X). */
10976 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10977 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10978 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10979 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10980 /* X & (Y | X) is (Y, X). */
10981 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10982 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10983 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10984 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10986 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10987 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10988 && integer_onep (TREE_OPERAND (arg0, 1))
10989 && integer_onep (arg1))
10991 tem = TREE_OPERAND (arg0, 0);
10992 return fold_build2_loc (loc, EQ_EXPR, type,
10993 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
10994 build_int_cst (TREE_TYPE (tem), 1)),
10995 build_int_cst (TREE_TYPE (tem), 0));
10997 /* Fold ~X & 1 as (X & 1) == 0. */
10998 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10999 && integer_onep (arg1))
11001 tem = TREE_OPERAND (arg0, 0);
11002 return fold_build2_loc (loc, EQ_EXPR, type,
11003 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11004 build_int_cst (TREE_TYPE (tem), 1)),
11005 build_int_cst (TREE_TYPE (tem), 0));
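/* Illustrative example: ~x & 1 tests the complement of x's low bit,
   so x == 6 gives 1 and x == 7 gives 0, exactly the C value of
   (x & 1) == 0.  The comparison form tends to combine better with a
   following branch.  */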
11008 /* Fold (X ^ Y) & Y as ~X & Y. */
11009 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11010 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11012 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11013 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11014 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11015 fold_convert_loc (loc, type, arg1));
11017 /* Fold (X ^ Y) & X as ~Y & X. */
11018 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11019 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11020 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11022 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11023 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11024 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11025 fold_convert_loc (loc, type, arg1));
11027 /* Fold X & (X ^ Y) as X & ~Y. */
11028 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11029 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11031 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11032 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11033 fold_convert_loc (loc, type, arg0),
11034 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11036 /* Fold X & (Y ^ X) as ~Y & X. */
11037 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11038 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11039 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11041 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11042 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11043 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11044 fold_convert_loc (loc, type, arg0));
11047 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11048 if (t1 != NULL_TREE)
11050 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11051 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11052 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11055 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11057 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11058 && (~TREE_INT_CST_LOW (arg1)
11059 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11061 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11064 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11066 This results in more efficient code for machines without a NOR
11067 instruction. Combine will canonicalize to the first form
11068 which will allow use of NOR instructions provided by the
11069 backend if they exist. */
11070 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11071 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11073 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11074 build2 (BIT_IOR_EXPR, type,
11075 fold_convert_loc (loc, type,
11076 TREE_OPERAND (arg0, 0)),
11077 fold_convert_loc (loc, type,
11078 TREE_OPERAND (arg1, 0))));
11081 /* If arg0 is derived from the address of an object or function, we may
11082 be able to fold this expression using the object or function's
11083 alignment. */
11084 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11086 unsigned HOST_WIDE_INT modulus, residue;
11087 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11089 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11090 integer_onep (arg1));
11092 /* This works because modulus is a power of 2. If this weren't the
11093 case, we'd have to replace it by its greatest power-of-2
11094 divisor: modulus & -modulus. */
11096 return build_int_cst (type, residue & low);
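/* Illustrative example (hypothetical object): if arg0 is &buf for a
   static double buf[4] that the target aligns to 8 bytes,
   get_pointer_modulus_and_residue reports modulus == 8 and
   residue == 0, so an expression like (uintptr_t) &buf & 7 folds to
   the constant 0 with no run-time address computation.  */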
11099 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11100 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11101 if the new mask might be further optimized. */
11102 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11103 || TREE_CODE (arg0) == RSHIFT_EXPR)
11104 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11105 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11106 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11107 < TYPE_PRECISION (TREE_TYPE (arg0))
11108 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11109 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11111 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11112 unsigned HOST_WIDE_INT mask
11113 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11114 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11115 tree shift_type = TREE_TYPE (arg0);
11117 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11118 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11119 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11120 && TYPE_PRECISION (TREE_TYPE (arg0))
11121 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11123 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11124 tree arg00 = TREE_OPERAND (arg0, 0);
11125 /* See if more bits can be proven as zero because of
11126 zero extension. */
11127 if (TREE_CODE (arg00) == NOP_EXPR
11128 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11130 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11131 if (TYPE_PRECISION (inner_type)
11132 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11133 && TYPE_PRECISION (inner_type) < prec)
11135 prec = TYPE_PRECISION (inner_type);
11136 /* See if we can shorten the right shift. */
11138 shift_type = inner_type;
11141 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11142 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11143 zerobits <<= prec - shiftc;
11144 /* For an arithmetic shift, if the sign bit could be set, zerobits
11145 can actually contain sign bits, so no transformation is
11146 possible, unless MASK masks them all away. In that
11147 case the shift needs to be converted into a logical shift. */
11148 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11149 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11151 if ((mask & zerobits) == 0)
11152 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11158 /* ((X << 16) & 0xff00) is (X, 0). */
11159 if ((mask & zerobits) == mask)
11160 return omit_one_operand_loc (loc, type,
11161 build_int_cst (type, 0), arg0);
11163 newmask = mask | zerobits;
11164 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11168 /* Only do the transformation if NEWMASK is some integer
11169 mode's mask. */
11170 for (prec = BITS_PER_UNIT;
11171 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11172 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11174 if (prec < HOST_BITS_PER_WIDE_INT
11175 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11179 if (shift_type != TREE_TYPE (arg0))
11181 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11182 fold_convert_loc (loc, shift_type,
11183 TREE_OPERAND (arg0, 0)),
11184 TREE_OPERAND (arg0, 1));
11185 tem = fold_convert_loc (loc, type, tem);
11189 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11190 if (!tree_int_cst_equal (newmaskt, arg1))
11191 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11199 /* Don't touch a floating-point divide by zero unless the mode
11200 of the constant can represent infinity. */
11201 if (TREE_CODE (arg1) == REAL_CST
11202 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11203 && real_zerop (arg1))
11206 /* Optimize A / A to 1.0 if we don't care about
11207 NaNs or Infinities. Skip the transformation
11208 for non-real operands. */
11209 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11210 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11211 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11212 && operand_equal_p (arg0, arg1, 0))
11214 tree r = build_real (TREE_TYPE (arg0), dconst1);
11216 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11219 /* The complex version of the above A / A optimization. */
11220 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11221 && operand_equal_p (arg0, arg1, 0))
11223 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11224 if (! HONOR_NANS (TYPE_MODE (elem_type))
11225 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11227 tree r = build_real (elem_type, dconst1);
11228 /* omit_two_operands will call fold_convert for us. */
11229 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11233 /* (-A) / (-B) -> A / B */
11234 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11235 return fold_build2_loc (loc, RDIV_EXPR, type,
11236 TREE_OPERAND (arg0, 0),
11237 negate_expr (arg1));
11238 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11239 return fold_build2_loc (loc, RDIV_EXPR, type,
11240 negate_expr (arg0),
11241 TREE_OPERAND (arg1, 0));
11243 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11244 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11245 && real_onep (arg1))
11246 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11248 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11249 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11250 && real_minus_onep (arg1))
11251 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11252 negate_expr (arg0)));
11254 /* If ARG1 is a constant, we can convert this to a multiply by the
11255 reciprocal. This does not have the same rounding properties,
11256 so only do this if -freciprocal-math. We can actually
11257 always safely do it if ARG1 is a power of two, but it's hard to
11258 tell if it is or not in a portable manner. */
11259 if (TREE_CODE (arg1) == REAL_CST)
11261 if (flag_reciprocal_math
11262 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11264 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11265 /* Find the reciprocal if optimizing and the result is exact. */
11269 r = TREE_REAL_CST (arg1);
11270 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11272 tem = build_real (type, r);
11273 return fold_build2_loc (loc, MULT_EXPR, type,
11274 fold_convert_loc (loc, type, arg0), tem);
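/* Illustrative example: 2.0 has the exact binary reciprocal 0.5, so
   x / 2.0 becomes x * 0.5 here even without -freciprocal-math (the
   guard above only requires optimization and an exact inverse);
   x / 3.0 is left alone because 1/3 is not exactly representable.  */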
11278 /* Convert A/B/C to A/(B*C). */
11279 if (flag_reciprocal_math
11280 && TREE_CODE (arg0) == RDIV_EXPR)
11281 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11282 fold_build2_loc (loc, MULT_EXPR, type,
11283 TREE_OPERAND (arg0, 1), arg1));
11285 /* Convert A/(B/C) to (A/B)*C. */
11286 if (flag_reciprocal_math
11287 && TREE_CODE (arg1) == RDIV_EXPR)
11288 return fold_build2_loc (loc, MULT_EXPR, type,
11289 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11290 TREE_OPERAND (arg1, 0)),
11291 TREE_OPERAND (arg1, 1));
11293 /* Convert C1/(X*C2) into (C1/C2)/X. */
11294 if (flag_reciprocal_math
11295 && TREE_CODE (arg1) == MULT_EXPR
11296 && TREE_CODE (arg0) == REAL_CST
11297 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11299 tree tem = const_binop (RDIV_EXPR, arg0,
11300 TREE_OPERAND (arg1, 1), 0);
11302 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11303 TREE_OPERAND (arg1, 0));
11306 if (flag_unsafe_math_optimizations)
11308 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11309 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11311 /* Optimize sin(x)/cos(x) as tan(x). */
11312 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11313 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11314 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11315 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11316 CALL_EXPR_ARG (arg1, 0), 0))
11318 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11320 if (tanfn != NULL_TREE)
11321 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11324 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11325 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11326 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11327 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11328 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11329 CALL_EXPR_ARG (arg1, 0), 0))
11331 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11333 if (tanfn != NULL_TREE)
11335 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11336 CALL_EXPR_ARG (arg0, 0));
11337 return fold_build2_loc (loc, RDIV_EXPR, type,
11338 build_real (type, dconst1), tmp);
11342 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11343 NaNs or Infinities. */
11344 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11345 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11346 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11348 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11349 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11351 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11352 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11353 && operand_equal_p (arg00, arg01, 0))
11355 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11357 if (cosfn != NULL_TREE)
11358 return build_call_expr_loc (loc, cosfn, 1, arg00);
11362 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11363 NaNs or Infinities. */
11364 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11365 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11366 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11368 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11369 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11371 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11372 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11373 && operand_equal_p (arg00, arg01, 0))
11375 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11377 if (cosfn != NULL_TREE)
11379 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11380 return fold_build2_loc (loc, RDIV_EXPR, type,
11381 build_real (type, dconst1),
11387 /* Optimize pow(x,c)/x as pow(x,c-1). */
11388 if (fcode0 == BUILT_IN_POW
11389 || fcode0 == BUILT_IN_POWF
11390 || fcode0 == BUILT_IN_POWL)
11392 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11393 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11394 if (TREE_CODE (arg01) == REAL_CST
11395 && !TREE_OVERFLOW (arg01)
11396 && operand_equal_p (arg1, arg00, 0))
11398 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11402 c = TREE_REAL_CST (arg01);
11403 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11404 arg = build_real (type, c);
11405 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11409 /* Optimize a/root(b/c) into a*root(c/b). */
11410 if (BUILTIN_ROOT_P (fcode1))
11412 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11414 if (TREE_CODE (rootarg) == RDIV_EXPR)
11416 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11417 tree b = TREE_OPERAND (rootarg, 0);
11418 tree c = TREE_OPERAND (rootarg, 1);
11420 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11422 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11423 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11427 /* Optimize x/expN(y) into x*expN(-y). */
11428 if (BUILTIN_EXPONENT_P (fcode1))
11430 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11431 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11432 arg1 = build_call_expr_loc (loc,
11434 fold_convert_loc (loc, type, arg));
11435 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11438 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11439 if (fcode1 == BUILT_IN_POW
11440 || fcode1 == BUILT_IN_POWF
11441 || fcode1 == BUILT_IN_POWL)
11443 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11444 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11445 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11446 tree neg11 = fold_convert_loc (loc, type,
11447 negate_expr (arg11));
11448 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11449 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11454 case TRUNC_DIV_EXPR:
11455 case FLOOR_DIV_EXPR:
11456 /* Simplify A / (B << N) where A and B are positive and B is
11457 a power of 2, to A >> (N + log2(B)). */
11458 strict_overflow_p = false;
11459 if (TREE_CODE (arg1) == LSHIFT_EXPR
11460 && (TYPE_UNSIGNED (type)
11461 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11463 tree sval = TREE_OPERAND (arg1, 0);
11464 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11466 tree sh_cnt = TREE_OPERAND (arg1, 1);
11467 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11469 if (strict_overflow_p)
11470 fold_overflow_warning (("assuming signed overflow does not "
11471 "occur when simplifying A / (B << N)"),
11472 WARN_STRICT_OVERFLOW_MISC);
11474 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11475 sh_cnt, build_int_cst (NULL_TREE, pow2));
11476 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11477 fold_convert_loc (loc, type, arg0), sh_cnt);
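/* Worked example (illustrative): for A == 64, B == 4, N == 1,
   A / (B << N) == 64 / 8 == 8, and the rewritten form computes
   64 >> (1 + log2 (4)) == 64 >> 3 == 8, replacing a division by a
   variable power of two with one right shift.  */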
11481 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11482 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11483 if (INTEGRAL_TYPE_P (type)
11484 && TYPE_UNSIGNED (type)
11485 && code == FLOOR_DIV_EXPR)
11486 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11490 case ROUND_DIV_EXPR:
11491 case CEIL_DIV_EXPR:
11492 case EXACT_DIV_EXPR:
11493 if (integer_onep (arg1))
11494 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11495 if (integer_zerop (arg1))
11497 /* X / -1 is -X. */
11498 if (!TYPE_UNSIGNED (type)
11499 && TREE_CODE (arg1) == INTEGER_CST
11500 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11501 && TREE_INT_CST_HIGH (arg1) == -1)
11502 return fold_convert_loc (loc, type, negate_expr (arg0));
11504 /* Convert -A / -B to A / B when the type is signed and overflow is
11505 undefined. */
11506 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11507 && TREE_CODE (arg0) == NEGATE_EXPR
11508 && negate_expr_p (arg1))
11510 if (INTEGRAL_TYPE_P (type))
11511 fold_overflow_warning (("assuming signed overflow does not occur "
11512 "when distributing negation across "
11514 WARN_STRICT_OVERFLOW_MISC);
11515 return fold_build2_loc (loc, code, type,
11516 fold_convert_loc (loc, type,
11517 TREE_OPERAND (arg0, 0)),
11518 fold_convert_loc (loc, type,
11519 negate_expr (arg1)));
11521 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11522 && TREE_CODE (arg1) == NEGATE_EXPR
11523 && negate_expr_p (arg0))
11525 if (INTEGRAL_TYPE_P (type))
11526 fold_overflow_warning (("assuming signed overflow does not occur "
11527 "when distributing negation across "
11529 WARN_STRICT_OVERFLOW_MISC);
11530 return fold_build2_loc (loc, code, type,
11531 fold_convert_loc (loc, type,
11532 negate_expr (arg0)),
11533 fold_convert_loc (loc, type,
11534 TREE_OPERAND (arg1, 0)));
11537 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11538 operation, EXACT_DIV_EXPR.
11540 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11541 At one time others generated faster code; it's not clear if they do
11542 after the last round of changes to the DIV code in expmed.c. */
11543 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11544 && multiple_of_p (type, arg0, arg1))
11545 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11547 strict_overflow_p = false;
11548 if (TREE_CODE (arg1) == INTEGER_CST
11549 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11550 &strict_overflow_p)))
11552 if (strict_overflow_p)
11553 fold_overflow_warning (("assuming signed overflow does not occur "
11554 "when simplifying division"),
11555 WARN_STRICT_OVERFLOW_MISC);
11556 return fold_convert_loc (loc, type, tem);
11561 case CEIL_MOD_EXPR:
11562 case FLOOR_MOD_EXPR:
11563 case ROUND_MOD_EXPR:
11564 case TRUNC_MOD_EXPR:
11565 /* X % 1 is always zero, but be sure to preserve any side
11566 effects in X. */
11567 if (integer_onep (arg1))
11568 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11570 /* For X % 0, return X % 0 unchanged so that we can get the
11571 proper warnings and errors. */
11572 if (integer_zerop (arg1))
11575 /* 0 % X is always zero, but be sure to preserve any side
11576 effects in X. Place this after checking for X == 0. */
11577 if (integer_zerop (arg0))
11578 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11580 /* X % -1 is zero. */
11581 if (!TYPE_UNSIGNED (type)
11582 && TREE_CODE (arg1) == INTEGER_CST
11583 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11584 && TREE_INT_CST_HIGH (arg1) == -1)
11585 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11587 /* X % -C is the same as X % C. */
11588 if (code == TRUNC_MOD_EXPR
11589 && !TYPE_UNSIGNED (type)
11590 && TREE_CODE (arg1) == INTEGER_CST
11591 && !TREE_OVERFLOW (arg1)
11592 && TREE_INT_CST_HIGH (arg1) < 0
11593 && !TYPE_OVERFLOW_TRAPS (type)
11594 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11595 && !sign_bit_p (arg1, arg1))
11596 return fold_build2_loc (loc, code, type,
11597 fold_convert_loc (loc, type, arg0),
11598 fold_convert_loc (loc, type,
11599 negate_expr (arg1)));
11601 /* X % -Y is the same as X % Y. */
11602 if (code == TRUNC_MOD_EXPR
11603 && !TYPE_UNSIGNED (type)
11604 && TREE_CODE (arg1) == NEGATE_EXPR
11605 && !TYPE_OVERFLOW_TRAPS (type))
11606 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11607 fold_convert_loc (loc, type,
11608 TREE_OPERAND (arg1, 0)));
11610 strict_overflow_p = false;
11611 if (TREE_CODE (arg1) == INTEGER_CST
11612 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11613 &strict_overflow_p)))
11615 if (strict_overflow_p)
11616 fold_overflow_warning (("assuming signed overflow does not occur "
11617 "when simplifying modulus"),
11618 WARN_STRICT_OVERFLOW_MISC);
11619 return fold_convert_loc (loc, type, tem);
11622 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11623 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11624 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11625 && (TYPE_UNSIGNED (type)
11626 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11629 /* Also optimize A % (C << N) where C is a power of 2,
11630 to A & ((C << N) - 1). */
11631 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11632 c = TREE_OPERAND (arg1, 0);
11634 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11637 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11638 build_int_cst (TREE_TYPE (arg1), 1));
11639 if (strict_overflow_p)
11640 fold_overflow_warning (("assuming signed overflow does not "
11641 "occur when simplifying "
11642 "X % (power of two)"),
11643 WARN_STRICT_OVERFLOW_MISC);
11644 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11645 fold_convert_loc (loc, type, arg0),
11646 fold_convert_loc (loc, type, mask));
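/* Illustrative example: for unsigned x, x % 8 becomes x & 7,
   e.g. 29 % 8 == 5 and 29 & 7 == 5.  The LSHIFT_EXPR case above
   handles the variable form x % (4u << n) as x & ((4u << n) - 1) in
   the same way.  */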
11654 if (integer_all_onesp (arg0))
11655 return omit_one_operand_loc (loc, type, arg0, arg1);
11659 /* Optimize -1 >> x for arithmetic right shifts. */
11660 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11661 && tree_expr_nonnegative_p (arg1))
11662 return omit_one_operand_loc (loc, type, arg0, arg1);
11663 /* ... fall through ... */
11667 if (integer_zerop (arg1))
11668 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11669 if (integer_zerop (arg0))
11670 return omit_one_operand_loc (loc, type, arg0, arg1);
11672 /* Since a negative shift count is not well-defined,
11673 don't try to compute it in the compiler. */
11674 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11677 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11678 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11679 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11680 && host_integerp (TREE_OPERAND (arg0, 1), false)
11681 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11683 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11684 + TREE_INT_CST_LOW (arg1));
11686 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11687 being well defined. */
11688 if (low >= TYPE_PRECISION (type))
11690 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11691 low = low % TYPE_PRECISION (type);
11692 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11693 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11694 TREE_OPERAND (arg0, 0));
11696 low = TYPE_PRECISION (type) - 1;
11699 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11700 build_int_cst (type, low));
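/* Illustrative example: (x >> 3) >> 2 combines to x >> 5.  When the
   summed count reaches the precision, say (x >> 20) >> 15 on a 32-bit
   unsigned type, the result is known to be zero; for rotates the
   count instead wraps modulo the precision, as handled above.  */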
11703 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11704 into x & ((unsigned)-1 >> c) for unsigned types. */
11705 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11706 || (TYPE_UNSIGNED (type)
11707 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11708 && host_integerp (arg1, false)
11709 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11710 && host_integerp (TREE_OPERAND (arg0, 1), false)
11711 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11713 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11714 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11720 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11722 lshift = build_int_cst (type, -1);
11723 lshift = int_const_binop (code, lshift, arg1, 0);
11725 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11729 /* Rewrite an LROTATE_EXPR by a constant into an
11730 RROTATE_EXPR by a new constant. */
11731 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11733 tree tem = build_int_cst (TREE_TYPE (arg1),
11734 TYPE_PRECISION (type));
11735 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11736 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
11739 /* If we have a rotate of a bit operation with the rotate count and
11740 the second operand of the bit operation both constant,
11741 permute the two operations. */
11742 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11743 && (TREE_CODE (arg0) == BIT_AND_EXPR
11744 || TREE_CODE (arg0) == BIT_IOR_EXPR
11745 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11746 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11747 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11748 fold_build2_loc (loc, code, type,
11749 TREE_OPERAND (arg0, 0), arg1),
11750 fold_build2_loc (loc, code, type,
11751 TREE_OPERAND (arg0, 1), arg1));
11753 /* Two consecutive rotates adding up to the precision of the
11754 type can be ignored. */
11755 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11756 && TREE_CODE (arg0) == RROTATE_EXPR
11757 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11758 && TREE_INT_CST_HIGH (arg1) == 0
11759 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11760 && ((TREE_INT_CST_LOW (arg1)
11761 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11762 == (unsigned int) TYPE_PRECISION (type)))
11763 return TREE_OPERAND (arg0, 0);
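/* Example: rotr (rotr (x, 12), 20) on a 32-bit type rotates by
   12 + 20 == 32 bits in total, i.e. not at all, so it folds to x.  */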
11765 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11766 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11767 if the latter can be further optimized. */
11768 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11769 && TREE_CODE (arg0) == BIT_AND_EXPR
11770 && TREE_CODE (arg1) == INTEGER_CST
11771 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11773 tree mask = fold_build2_loc (loc, code, type,
11774 fold_convert_loc (loc, type,
11775 TREE_OPERAND (arg0, 1)),
11777 tree shift = fold_build2_loc (loc, code, type,
11778 fold_convert_loc (loc, type,
11779 TREE_OPERAND (arg0, 0)),
11781 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11789 if (operand_equal_p (arg0, arg1, 0))
11790 return omit_one_operand_loc (loc, type, arg0, arg1);
11791 if (INTEGRAL_TYPE_P (type)
11792 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11793 return omit_one_operand_loc (loc, type, arg1, arg0);
11794 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11800 if (operand_equal_p (arg0, arg1, 0))
11801 return omit_one_operand_loc (loc, type, arg0, arg1);
11802 if (INTEGRAL_TYPE_P (type)
11803 && TYPE_MAX_VALUE (type)
11804 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11805 return omit_one_operand_loc (loc, type, arg1, arg0);
11806 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11811 case TRUTH_ANDIF_EXPR:
11812 /* Note that the operands of this must be ints
11813 and their values must be 0 or 1.
11814 ("true" is a fixed value perhaps depending on the language.) */
11815 /* If first arg is constant zero, return it. */
11816 if (integer_zerop (arg0))
11817 return fold_convert_loc (loc, type, arg0);
11818 case TRUTH_AND_EXPR:
11819 /* If either arg is constant true, drop it. */
11820 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11821 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11822 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11823 /* Preserve sequence points. */
11824 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11825 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11826 /* If second arg is constant zero, result is zero, but first arg
11827 must be evaluated. */
11828 if (integer_zerop (arg1))
11829 return omit_one_operand_loc (loc, type, arg1, arg0);
11830 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11831 case will be handled here. */
11832 if (integer_zerop (arg0))
11833 return omit_one_operand_loc (loc, type, arg0, arg1);
11835 /* !X && X is always false. */
11836 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11837 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11838 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11839 /* X && !X is always false. */
11840 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11841 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11842 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11844 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11845 means A >= Y && A != MAX, but in this case we know that
11846 A < X <= MAX. */
11848 if (!TREE_SIDE_EFFECTS (arg0)
11849 && !TREE_SIDE_EFFECTS (arg1))
11851 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11852 if (tem && !operand_equal_p (tem, arg0, 0))
11853 return fold_build2_loc (loc, code, type, tem, arg1);
11855 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11856 if (tem && !operand_equal_p (tem, arg1, 0))
11857 return fold_build2_loc (loc, code, type, arg0, tem);
11861 /* We only do these simplifications if we are optimizing. */
11865 /* Check for things like (A || B) && (A || C). We can convert this
11866 to A || (B && C). Note that either operator can be any of the four
11867 truth and/or operations and the transformation will still be
11868 valid. Also note that we only care about order for the
11869 ANDIF and ORIF operators. If B contains side effects, this
11870 might change the truth-value of A. */
11871 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11872 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11873 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11874 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11875 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11876 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11878 tree a00 = TREE_OPERAND (arg0, 0);
11879 tree a01 = TREE_OPERAND (arg0, 1);
11880 tree a10 = TREE_OPERAND (arg1, 0);
11881 tree a11 = TREE_OPERAND (arg1, 1);
11882 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11883 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11884 && (code == TRUTH_AND_EXPR
11885 || code == TRUTH_OR_EXPR));
11887 if (operand_equal_p (a00, a10, 0))
11888 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11889 fold_build2_loc (loc, code, type, a01, a11));
11890 else if (commutative && operand_equal_p (a00, a11, 0))
11891 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
11892 fold_build2_loc (loc, code, type, a01, a10));
11893 else if (commutative && operand_equal_p (a01, a10, 0))
11894 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
11895 fold_build2_loc (loc, code, type, a00, a11));
11897 /* This case is tricky because we must either have commutative
11898 operators or else A10 must not have side-effects. */
11900 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11901 && operand_equal_p (a01, a11, 0))
11902 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11903 fold_build2_loc (loc, code, type, a00, a10),
11907 /* See if we can build a range comparison. */
11908 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
11911 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
11912 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
11914 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
11916 return fold_build2_loc (loc, code, type, tem, arg1);
11919 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
11920 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
11922 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
11924 return fold_build2_loc (loc, code, type, arg0, tem);
11927 /* Check for the possibility of merging component references. If our
11928 lhs is another similar operation, try to merge its rhs with our
11929 rhs. Then try to merge our lhs and rhs. */
11930 if (TREE_CODE (arg0) == code
11931 && 0 != (tem = fold_truthop (loc, code, type,
11932 TREE_OPERAND (arg0, 1), arg1)))
11933 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
11935 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
11940 case TRUTH_ORIF_EXPR:
11941 /* Note that the operands of this must be ints
11942 and their values must be 0 or true.
11943 ("true" is a fixed value perhaps depending on the language.) */
11944 /* If first arg is constant true, return it. */
11945 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11946 return fold_convert_loc (loc, type, arg0);
11947 case TRUTH_OR_EXPR:
11948 /* If either arg is constant zero, drop it. */
11949 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11950 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11951 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11952 /* Preserve sequence points. */
11953 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11954 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11955 /* If second arg is constant true, result is true, but we must
11956 evaluate first arg. */
11957 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11958 return omit_one_operand_loc (loc, type, arg1, arg0);
11959 /* Likewise for first arg, but note this only occurs here for
11960 TRUTH_OR_EXPR. */
11961 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11962 return omit_one_operand_loc (loc, type, arg0, arg1);
11964 /* !X || X is always true. */
11965 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11966 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11967 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11968 /* X || !X is always true. */
11969 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11970 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11971 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11975 case TRUTH_XOR_EXPR:
11976 /* If the second arg is constant zero, drop it. */
11977 if (integer_zerop (arg1))
11978 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11979 /* If the second arg is constant true, this is a logical inversion. */
11980 if (integer_onep (arg1))
11982 /* Only call invert_truthvalue if operand is a truth value. */
11983 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11984 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11986 tem = invert_truthvalue_loc (loc, arg0);
11987 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11989 /* Identical arguments cancel to zero. */
11990 if (operand_equal_p (arg0, arg1, 0))
11991 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11993 /* !X ^ X is always true. */
11994 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11995 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11996 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11998 /* X ^ !X is always true. */
11999 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12000 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12001 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12007 tem = fold_comparison (loc, code, type, op0, op1);
12008 if (tem != NULL_TREE)
12011 /* bool_var != 0 becomes bool_var. */
12012 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12013 && code == NE_EXPR)
12014 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12016 /* bool_var == 1 becomes bool_var. */
12017 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12018 && code == EQ_EXPR)
12019 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12021 /* bool_var != 1 becomes !bool_var. */
12022 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12023 && code == NE_EXPR)
12024 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12025 fold_convert_loc (loc, type, arg0));
12027 /* bool_var == 0 becomes !bool_var. */
12028 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12029 && code == EQ_EXPR)
12030 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12031 fold_convert_loc (loc, type, arg0));
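/* Taken together, the four rules above canonicalize boolean tests.
   Sketch, assuming a _Bool-typed b (illustrative only):

     b != 0  =>  b           b == 1  =>  b
     b != 1  =>  !b          b == 0  =>  !b
*/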
12033 /* !exp != 0 becomes !exp */
12034 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12035 && code == NE_EXPR)
12036 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12038 /* If this is an equality comparison of the address of two non-weak,
12039 unaliased symbols neither of which is extern (since we do not
12040 have access to attributes for externs), then we know the result. */
12041 if (TREE_CODE (arg0) == ADDR_EXPR
12042 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12043 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12044 && ! lookup_attribute ("alias",
12045 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12046 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12047 && TREE_CODE (arg1) == ADDR_EXPR
12048 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12049 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12050 && ! lookup_attribute ("alias",
12051 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12052 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12054 /* We know that we're looking at the address of two
12055 non-weak, unaliased, static _DECL nodes.
12057 It is both wasteful and incorrect to call operand_equal_p
12058 to compare the two ADDR_EXPR nodes. It is wasteful in that
12059 all we need to do is test pointer equality for the arguments
12060 to the two ADDR_EXPR nodes. It is incorrect to use
12061 operand_equal_p as that function is NOT equivalent to a
12062 C equality test. It can in fact return false for two
12063 objects which would test as equal using the C equality
12064 operator. */
12065 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12066 return constant_boolean_node (equal
12067 ? code == EQ_EXPR : code != EQ_EXPR,
12071 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12072 a MINUS_EXPR of a constant, we can convert it into a comparison with
12073 a revised constant as long as no overflow occurs. */
12074 if (TREE_CODE (arg1) == INTEGER_CST
12075 && (TREE_CODE (arg0) == PLUS_EXPR
12076 || TREE_CODE (arg0) == MINUS_EXPR)
12077 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12078 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12079 ? MINUS_EXPR : PLUS_EXPR,
12080 fold_convert_loc (loc, TREE_TYPE (arg0),
12082 TREE_OPERAND (arg0, 1), 0))
12083 && !TREE_OVERFLOW (tem))
12084 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12086 /* Similarly for a NEGATE_EXPR. */
12087 if (TREE_CODE (arg0) == NEGATE_EXPR
12088 && TREE_CODE (arg1) == INTEGER_CST
12089 && 0 != (tem = negate_expr (arg1))
12090 && TREE_CODE (tem) == INTEGER_CST
12091 && !TREE_OVERFLOW (tem))
12092 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12094 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12095 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12096 && TREE_CODE (arg1) == INTEGER_CST
12097 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12098 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12099 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12100 fold_convert_loc (loc,
12103 TREE_OPERAND (arg0, 1)));
12105 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12106 if ((TREE_CODE (arg0) == PLUS_EXPR
12107 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12108 || TREE_CODE (arg0) == MINUS_EXPR)
12109 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12110 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12111 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12113 tree val = TREE_OPERAND (arg0, 1);
12114 return omit_two_operands_loc (loc, type,
12115 fold_build2_loc (loc, code, type,
12117 build_int_cst (TREE_TYPE (val),
12119 TREE_OPERAND (arg0, 0), arg1);
12122 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12123 if (TREE_CODE (arg0) == MINUS_EXPR
12124 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12125 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12126 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12128 return omit_two_operands_loc (loc, type,
12130 ? boolean_true_node : boolean_false_node,
12131 TREE_OPERAND (arg0, 1), arg1);
12134 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12135 for !=. Don't do this for ordered comparisons due to overflow. */
12136 if (TREE_CODE (arg0) == MINUS_EXPR
12137 && integer_zerop (arg1))
12138 return fold_build2_loc (loc, code, type,
12139 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
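/* Sketch: (a - b) == 0 folds to a == b, and likewise for !=.  The
   ordered forms are deliberately left alone: with wrapping 32-bit
   arithmetic, a = INT_MIN and b = 1 give a - b == INT_MAX, so
   "a - b > 0" and "a > b" disagree.  */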
12141 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12142 if (TREE_CODE (arg0) == ABS_EXPR
12143 && (integer_zerop (arg1) || real_zerop (arg1)))
12144 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12146 /* If this is an EQ or NE comparison with zero and ARG0 is
12147 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12148 two operations, but the latter can be done in one less insn
12149 on machines that have only two-operand insns or on which a
12150 constant cannot be the first operand. */
12151 if (TREE_CODE (arg0) == BIT_AND_EXPR
12152 && integer_zerop (arg1))
12154 tree arg00 = TREE_OPERAND (arg0, 0);
12155 tree arg01 = TREE_OPERAND (arg0, 1);
12156 if (TREE_CODE (arg00) == LSHIFT_EXPR
12157 && integer_onep (TREE_OPERAND (arg00, 0)))
12159 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12160 arg01, TREE_OPERAND (arg00, 1));
12161 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12162 build_int_cst (TREE_TYPE (arg0), 1));
12163 return fold_build2_loc (loc, code, type,
12164 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12167 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12168 && integer_onep (TREE_OPERAND (arg01, 0)))
12170 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12171 arg00, TREE_OPERAND (arg01, 1));
12172 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12173 build_int_cst (TREE_TYPE (arg0), 1));
12174 return fold_build2_loc (loc, code, type,
12175 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12180 /* If this is an NE or EQ comparison of zero against the result of a
12181 signed MOD operation whose second operand is a power of 2, make
12182 the MOD operation unsigned since it is simpler and equivalent. */
12183 if (integer_zerop (arg1)
12184 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12185 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12186 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12187 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12188 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12189 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12191 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12192 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12193 fold_convert_loc (loc, newtype,
12194 TREE_OPERAND (arg0, 0)),
12195 fold_convert_loc (loc, newtype,
12196 TREE_OPERAND (arg0, 1)));
12198 return fold_build2_loc (loc, code, type, newmod,
12199 fold_convert_loc (loc, newtype, arg1));
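/* Sketch, assuming a signed int x (illustrative only): "x % 8 != 0"
   is rewritten as "(unsigned int) x % 8U != 0".  For == and != against
   zero the two agree -- divisibility by 8 is a property of the low
   three bits alone -- and the unsigned form can later be reduced to
   the mask test (x & 7) != 0.  */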
12202 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12203 C1 is a valid shift constant, and C2 is a power of two, i.e.
12204 a single bit. */
12205 if (TREE_CODE (arg0) == BIT_AND_EXPR
12206 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12207 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12209 && integer_pow2p (TREE_OPERAND (arg0, 1))
12210 && integer_zerop (arg1))
12212 tree itype = TREE_TYPE (arg0);
12213 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12214 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12216 /* Check for a valid shift count. */
12217 if (TREE_INT_CST_HIGH (arg001) == 0
12218 && TREE_INT_CST_LOW (arg001) < prec)
12220 tree arg01 = TREE_OPERAND (arg0, 1);
12221 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12222 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12223 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12224 can be rewritten as (X & (C2 << C1)) != 0. */
12225 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12227 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12228 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12229 return fold_build2_loc (loc, code, type, tem, arg1);
12231 /* Otherwise, for signed (arithmetic) shifts,
12232 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12233 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12234 else if (!TYPE_UNSIGNED (itype))
12235 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12236 arg000, build_int_cst (itype, 0));
12237 /* Otherwise, for unsigned (logical) shifts,
12238 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12239 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12241 return omit_one_operand_loc (loc, type,
12242 code == EQ_EXPR ? integer_one_node
12243 : integer_zero_node,
12248 /* If this is an NE comparison of zero with an AND of one, remove the
12249 comparison since the AND will give the correct value. */
12250 if (code == NE_EXPR
12251 && integer_zerop (arg1)
12252 && TREE_CODE (arg0) == BIT_AND_EXPR
12253 && integer_onep (TREE_OPERAND (arg0, 1)))
12254 return fold_convert_loc (loc, type, arg0);
12256 /* If we have (A & C) == C where C is a power of 2, convert this into
12257 (A & C) != 0. Similarly for NE_EXPR. */
12258 if (TREE_CODE (arg0) == BIT_AND_EXPR
12259 && integer_pow2p (TREE_OPERAND (arg0, 1))
12260 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12261 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12262 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12263 integer_zero_node));
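/* Sketch, with an assumed single-bit constant FLAG (say 0x10):
   "(flags & FLAG) == FLAG" tests exactly the same bit as
   "(flags & FLAG) != 0", so the former is canonicalized to the
   latter (and "(flags & FLAG) != FLAG" to "(flags & FLAG) == 0").  */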
12265 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12266 bit, then fold the expression into A < 0 or A >= 0. */
12267 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12271 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12272 Similarly for NE_EXPR. */
12273 if (TREE_CODE (arg0) == BIT_AND_EXPR
12274 && TREE_CODE (arg1) == INTEGER_CST
12275 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12277 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12278 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12279 TREE_OPERAND (arg0, 1));
12280 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12282 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12283 if (integer_nonzerop (dandnotc))
12284 return omit_one_operand_loc (loc, type, rslt, arg0);
12287 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12288 Similarly for NE_EXPR. */
12289 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12290 && TREE_CODE (arg1) == INTEGER_CST
12291 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12293 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12294 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12295 TREE_OPERAND (arg0, 1), notd);
12296 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12297 if (integer_nonzerop (candnotd))
12298 return omit_one_operand_loc (loc, type, rslt, arg0);
12301 /* If this is a comparison of a field, we may be able to simplify it. */
12302 if ((TREE_CODE (arg0) == COMPONENT_REF
12303 || TREE_CODE (arg0) == BIT_FIELD_REF)
12304 /* Handle the constant case even without -O
12305 to make sure the warnings are given. */
12306 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12308 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12313 /* Optimize comparisons of strlen vs zero to a compare of the
12314 first character of the string vs zero. To wit,
12315 strlen(ptr) == 0 => *ptr == 0
12316 strlen(ptr) != 0 => *ptr != 0
12317 Other cases should reduce to one of these two (or a constant)
12318 due to the return value of strlen being unsigned. */
12319 if (TREE_CODE (arg0) == CALL_EXPR
12320 && integer_zerop (arg1))
12322 tree fndecl = get_callee_fndecl (arg0);
12325 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12326 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12327 && call_expr_nargs (arg0) == 1
12328 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12330 tree iref = build_fold_indirect_ref_loc (loc,
12331 CALL_EXPR_ARG (arg0, 0));
12332 return fold_build2_loc (loc, code, type, iref,
12333 build_int_cst (TREE_TYPE (iref), 0));
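/* Sketch, assuming char *p (illustrative only):

     strlen (p) == 0   =>   *p == 0
     strlen (p) != 0   =>   *p != 0

   valid because strlen returns zero exactly when the first character
   is the terminating NUL, saving a call and a full scan of p.  */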
12337 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12338 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12339 if (TREE_CODE (arg0) == RSHIFT_EXPR
12340 && integer_zerop (arg1)
12341 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12343 tree arg00 = TREE_OPERAND (arg0, 0);
12344 tree arg01 = TREE_OPERAND (arg0, 1);
12345 tree itype = TREE_TYPE (arg00);
12346 if (TREE_INT_CST_HIGH (arg01) == 0
12347 && TREE_INT_CST_LOW (arg01)
12348 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12350 if (TYPE_UNSIGNED (itype))
12352 itype = signed_type_for (itype);
12353 arg00 = fold_convert_loc (loc, itype, arg00);
12355 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12356 type, arg00, build_int_cst (itype, 0));
12360 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12361 if (integer_zerop (arg1)
12362 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12363 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12364 TREE_OPERAND (arg0, 1));
12366 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12367 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12368 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12369 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12370 build_int_cst (TREE_TYPE (arg1), 0));
12371 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12372 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12373 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12374 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12375 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12376 build_int_cst (TREE_TYPE (arg1), 0));
12378 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12379 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12380 && TREE_CODE (arg1) == INTEGER_CST
12381 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12382 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12383 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12384 TREE_OPERAND (arg0, 1), arg1));
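/* Example: "(x ^ 5) == 12" folds to "x == (5 ^ 12)", i.e. "x == 9",
   since XOR by a constant is its own inverse; the new constant is
   evaluated at compile time.  */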
12386 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12387 (X & C) == 0 when C is a single bit. */
12388 if (TREE_CODE (arg0) == BIT_AND_EXPR
12389 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12390 && integer_zerop (arg1)
12391 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12393 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12394 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12395 TREE_OPERAND (arg0, 1));
12396 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12400 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12401 constant C is a power of two, i.e. a single bit. */
12402 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12403 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12404 && integer_zerop (arg1)
12405 && integer_pow2p (TREE_OPERAND (arg0, 1))
12406 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12407 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12409 tree arg00 = TREE_OPERAND (arg0, 0);
12410 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12411 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12414 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12415 when C is a power of two, i.e. a single bit. */
12416 if (TREE_CODE (arg0) == BIT_AND_EXPR
12417 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12418 && integer_zerop (arg1)
12419 && integer_pow2p (TREE_OPERAND (arg0, 1))
12420 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12421 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12423 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12424 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12425 arg000, TREE_OPERAND (arg0, 1));
12426 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12427 tem, build_int_cst (TREE_TYPE (tem), 0));
12430 if (integer_zerop (arg1)
12431 && tree_expr_nonzero_p (arg0))
12433 tree res = constant_boolean_node (code == NE_EXPR, type);
12434 return omit_one_operand_loc (loc, type, res, arg0);
12437 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12438 if (TREE_CODE (arg0) == NEGATE_EXPR
12439 && TREE_CODE (arg1) == NEGATE_EXPR)
12440 return fold_build2_loc (loc, code, type,
12441 TREE_OPERAND (arg0, 0),
12442 TREE_OPERAND (arg1, 0));
12444 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12445 if (TREE_CODE (arg0) == BIT_AND_EXPR
12446 && TREE_CODE (arg1) == BIT_AND_EXPR)
12448 tree arg00 = TREE_OPERAND (arg0, 0);
12449 tree arg01 = TREE_OPERAND (arg0, 1);
12450 tree arg10 = TREE_OPERAND (arg1, 0);
12451 tree arg11 = TREE_OPERAND (arg1, 1);
12452 tree itype = TREE_TYPE (arg0);
12454 if (operand_equal_p (arg01, arg11, 0))
12455 return fold_build2_loc (loc, code, type,
12456 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12457 fold_build2_loc (loc,
12458 BIT_XOR_EXPR, itype,
12461 build_int_cst (itype, 0));
12463 if (operand_equal_p (arg01, arg10, 0))
12464 return fold_build2_loc (loc, code, type,
12465 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12466 fold_build2_loc (loc,
12467 BIT_XOR_EXPR, itype,
12470 build_int_cst (itype, 0));
12472 if (operand_equal_p (arg00, arg11, 0))
12473 return fold_build2_loc (loc, code, type,
12474 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12475 fold_build2_loc (loc,
12476 BIT_XOR_EXPR, itype,
12479 build_int_cst (itype, 0));
12481 if (operand_equal_p (arg00, arg10, 0))
12482 return fold_build2_loc (loc, code, type,
12483 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12484 fold_build2_loc (loc,
12485 BIT_XOR_EXPR, itype,
12488 build_int_cst (itype, 0));
12491 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12492 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12494 tree arg00 = TREE_OPERAND (arg0, 0);
12495 tree arg01 = TREE_OPERAND (arg0, 1);
12496 tree arg10 = TREE_OPERAND (arg1, 0);
12497 tree arg11 = TREE_OPERAND (arg1, 1);
12498 tree itype = TREE_TYPE (arg0);
12500 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12501 operand_equal_p guarantees no side-effects so we don't need
12502 to use omit_one_operand on Z. */
12503 if (operand_equal_p (arg01, arg11, 0))
12504 return fold_build2_loc (loc, code, type, arg00, arg10);
12505 if (operand_equal_p (arg01, arg10, 0))
12506 return fold_build2_loc (loc, code, type, arg00, arg11);
12507 if (operand_equal_p (arg00, arg11, 0))
12508 return fold_build2_loc (loc, code, type, arg01, arg10);
12509 if (operand_equal_p (arg00, arg10, 0))
12510 return fold_build2_loc (loc, code, type, arg01, arg11);
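/* Example: "(a ^ z) == (b ^ z)" folds to "a == b"; XOR with a common
   value is a bijection, so it preserves equality and inequality.  */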
12512 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12513 if (TREE_CODE (arg01) == INTEGER_CST
12514 && TREE_CODE (arg11) == INTEGER_CST)
12515 return fold_build2_loc (loc, code, type,
12516 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
12517 fold_build2_loc (loc,
12518 BIT_XOR_EXPR, itype,
12523 /* Attempt to simplify equality/inequality comparisons of complex
12524 values. Only lower the comparison if the result is known or
12525 can be simplified to a single scalar comparison. */
12526 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12527 || TREE_CODE (arg0) == COMPLEX_CST)
12528 && (TREE_CODE (arg1) == COMPLEX_EXPR
12529 || TREE_CODE (arg1) == COMPLEX_CST))
12531 tree real0, imag0, real1, imag1;
12534 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12536 real0 = TREE_OPERAND (arg0, 0);
12537 imag0 = TREE_OPERAND (arg0, 1);
12541 real0 = TREE_REALPART (arg0);
12542 imag0 = TREE_IMAGPART (arg0);
12545 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12547 real1 = TREE_OPERAND (arg1, 0);
12548 imag1 = TREE_OPERAND (arg1, 1);
12552 real1 = TREE_REALPART (arg1);
12553 imag1 = TREE_IMAGPART (arg1);
12556 rcond = fold_binary_loc (loc, code, type, real0, real1);
12557 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12559 if (integer_zerop (rcond))
12561 if (code == EQ_EXPR)
12562 return omit_two_operands_loc (loc, type, boolean_false_node,
12564 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12568 if (code == NE_EXPR)
12569 return omit_two_operands_loc (loc, type, boolean_true_node,
12571 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12575 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12576 if (icond && TREE_CODE (icond) == INTEGER_CST)
12578 if (integer_zerop (icond))
12580 if (code == EQ_EXPR)
12581 return omit_two_operands_loc (loc, type, boolean_false_node,
12583 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12587 if (code == NE_EXPR)
12588 return omit_two_operands_loc (loc, type, boolean_true_node,
12590 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12601 tem = fold_comparison (loc, code, type, op0, op1);
12602 if (tem != NULL_TREE)
12605 /* Transform comparisons of the form X +- C CMP X. */
12606 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12607 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12608 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12609 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12610 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12611 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12613 tree arg01 = TREE_OPERAND (arg0, 1);
12614 enum tree_code code0 = TREE_CODE (arg0);
12617 if (TREE_CODE (arg01) == REAL_CST)
12618 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12620 is_positive = tree_int_cst_sgn (arg01);
12622 /* (X - c) > X becomes false. */
12623 if (code == GT_EXPR
12624 && ((code0 == MINUS_EXPR && is_positive >= 0)
12625 || (code0 == PLUS_EXPR && is_positive <= 0)))
12627 if (TREE_CODE (arg01) == INTEGER_CST
12628 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12629 fold_overflow_warning (("assuming signed overflow does not "
12630 "occur when assuming that (X - c) > X "
12631 "is always false"),
12632 WARN_STRICT_OVERFLOW_ALL);
12633 return constant_boolean_node (0, type);
12636 /* Likewise (X + c) < X becomes false. */
12637 if (code == LT_EXPR
12638 && ((code0 == PLUS_EXPR && is_positive >= 0)
12639 || (code0 == MINUS_EXPR && is_positive <= 0)))
12641 if (TREE_CODE (arg01) == INTEGER_CST
12642 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12643 fold_overflow_warning (("assuming signed overflow does not "
12644 "occur when assuming that "
12645 "(X + c) < X is always false"),
12646 WARN_STRICT_OVERFLOW_ALL);
12647 return constant_boolean_node (0, type);
12650 /* Convert (X - c) <= X to true. */
12651 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12653 && ((code0 == MINUS_EXPR && is_positive >= 0)
12654 || (code0 == PLUS_EXPR && is_positive <= 0)))
12656 if (TREE_CODE (arg01) == INTEGER_CST
12657 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12658 fold_overflow_warning (("assuming signed overflow does not "
12659 "occur when assuming that "
12660 "(X - c) <= X is always true"),
12661 WARN_STRICT_OVERFLOW_ALL);
12662 return constant_boolean_node (1, type);
12665 /* Convert (X + c) >= X to true. */
12666 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12668 && ((code0 == PLUS_EXPR && is_positive >= 0)
12669 || (code0 == MINUS_EXPR && is_positive <= 0)))
12671 if (TREE_CODE (arg01) == INTEGER_CST
12672 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12673 fold_overflow_warning (("assuming signed overflow does not "
12674 "occur when assuming that "
12675 "(X + c) >= X is always true"),
12676 WARN_STRICT_OVERFLOW_ALL);
12677 return constant_boolean_node (1, type);
12680 if (TREE_CODE (arg01) == INTEGER_CST)
12682 /* Convert X + c > X and X - c < X to true for integers. */
12683 if (code == GT_EXPR
12684 && ((code0 == PLUS_EXPR && is_positive > 0)
12685 || (code0 == MINUS_EXPR && is_positive < 0)))
12687 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12688 fold_overflow_warning (("assuming signed overflow does "
12689 "not occur when assuming that "
12690 "(X + c) > X is always true"),
12691 WARN_STRICT_OVERFLOW_ALL);
12692 return constant_boolean_node (1, type);
12695 if (code == LT_EXPR
12696 && ((code0 == MINUS_EXPR && is_positive > 0)
12697 || (code0 == PLUS_EXPR && is_positive < 0)))
12699 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12700 fold_overflow_warning (("assuming signed overflow does "
12701 "not occur when assuming that "
12702 "(X - c) < X is always true"),
12703 WARN_STRICT_OVERFLOW_ALL);
12704 return constant_boolean_node (1, type);
12707 /* Convert X + c <= X and X - c >= X to false for integers. */
12708 if (code == LE_EXPR
12709 && ((code0 == PLUS_EXPR && is_positive > 0)
12710 || (code0 == MINUS_EXPR && is_positive < 0)))
12712 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12713 fold_overflow_warning (("assuming signed overflow does "
12714 "not occur when assuming that "
12715 "(X + c) <= X is always false"),
12716 WARN_STRICT_OVERFLOW_ALL);
12717 return constant_boolean_node (0, type);
12720 if (code == GE_EXPR
12721 && ((code0 == MINUS_EXPR && is_positive > 0)
12722 || (code0 == PLUS_EXPR && is_positive < 0)))
12724 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12725 fold_overflow_warning (("assuming signed overflow does "
12726 "not occur when assuming that "
12727 "(X - c) >= X is always false"),
12728 WARN_STRICT_OVERFLOW_ALL);
12729 return constant_boolean_node (0, type);
12734 /* Comparisons with the highest or lowest possible integer of
12735 the specified precision will have known values. */
12737 tree arg1_type = TREE_TYPE (arg1);
12738 unsigned int width = TYPE_PRECISION (arg1_type);
12740 if (TREE_CODE (arg1) == INTEGER_CST
12741 && width <= 2 * HOST_BITS_PER_WIDE_INT
12742 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12744 HOST_WIDE_INT signed_max_hi;
12745 unsigned HOST_WIDE_INT signed_max_lo;
12746 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12748 if (width <= HOST_BITS_PER_WIDE_INT)
12750 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12755 if (TYPE_UNSIGNED (arg1_type))
12757 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12763 max_lo = signed_max_lo;
12764 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12770 width -= HOST_BITS_PER_WIDE_INT;
12771 signed_max_lo = -1;
12772 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12777 if (TYPE_UNSIGNED (arg1_type))
12779 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12784 max_hi = signed_max_hi;
12785 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12789 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12790 && TREE_INT_CST_LOW (arg1) == max_lo)
12794 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12797 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12800 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12803 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12805 /* The GE_EXPR and LT_EXPR cases above are not normally
12806 reached because of previous transformations. */
12811 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12813 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12817 arg1 = const_binop (PLUS_EXPR, arg1,
12818 build_int_cst (TREE_TYPE (arg1), 1), 0);
12819 return fold_build2_loc (loc, EQ_EXPR, type,
12820 fold_convert_loc (loc,
12821 TREE_TYPE (arg1), arg0),
12824 arg1 = const_binop (PLUS_EXPR, arg1,
12825 build_int_cst (TREE_TYPE (arg1), 1), 0);
12826 return fold_build2_loc (loc, NE_EXPR, type,
12827 fold_convert_loc (loc, TREE_TYPE (arg1),
12833 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12835 && TREE_INT_CST_LOW (arg1) == min_lo)
12839 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12842 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12845 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12848 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12853 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12855 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12859 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12860 return fold_build2_loc (loc, NE_EXPR, type,
12861 fold_convert_loc (loc,
12862 TREE_TYPE (arg1), arg0),
12865 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12866 return fold_build2_loc (loc, EQ_EXPR, type,
12867 fold_convert_loc (loc, TREE_TYPE (arg1),
12874 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12875 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12876 && TYPE_UNSIGNED (arg1_type)
12877 /* We will flip the signedness of the comparison operator
12878 associated with the mode of arg1, so the sign bit is
12879 specified by this mode. Check that arg1 is the signed
12880 max associated with this sign bit. */
12881 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12882 /* signed_type does not work on pointer types. */
12883 && INTEGRAL_TYPE_P (arg1_type))
12885 /* The following case also applies to X < signed_max+1
12886 and X >= signed_max+1 because of previous transformations. */
12887 if (code == LE_EXPR || code == GT_EXPR)
12890 st = signed_type_for (TREE_TYPE (arg1));
12891 return fold_build2_loc (loc,
12892 code == LE_EXPR ? GE_EXPR : LT_EXPR,
12893 type, fold_convert_loc (loc, st, arg0),
12894 build_int_cst (st, 0));
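/* Sketch, assuming a 32-bit unsigned int u: "u <= 0x7fffffff" becomes
   "(int) u >= 0", and "u > 0x7fffffff" becomes "(int) u < 0".  The
   comparison against the signed maximum is just a test of the sign
   bit, which the signed comparison with zero expresses directly.  */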
12900 /* If we are comparing an ABS_EXPR with a constant, we can
12901 convert all the cases into explicit comparisons, but they may
12902 well not be faster than doing the ABS and one comparison.
12903 But ABS (X) <= C is a range comparison, which becomes a subtraction
12904 and a comparison, and is probably faster. */
12905 if (code == LE_EXPR
12906 && TREE_CODE (arg1) == INTEGER_CST
12907 && TREE_CODE (arg0) == ABS_EXPR
12908 && ! TREE_SIDE_EFFECTS (arg0)
12909 && (0 != (tem = negate_expr (arg1)))
12910 && TREE_CODE (tem) == INTEGER_CST
12911 && !TREE_OVERFLOW (tem))
12912 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12913 build2 (GE_EXPR, type,
12914 TREE_OPERAND (arg0, 0), tem),
12915 build2 (LE_EXPR, type,
12916 TREE_OPERAND (arg0, 0), arg1));
12918 /* Convert ABS_EXPR<x> >= 0 to true. */
12919 strict_overflow_p = false;
12920 if (code == GE_EXPR
12921 && (integer_zerop (arg1)
12922 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12923 && real_zerop (arg1)))
12924 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12926 if (strict_overflow_p)
12927 fold_overflow_warning (("assuming signed overflow does not occur "
12928 "when simplifying comparison of "
12929 "absolute value and zero"),
12930 WARN_STRICT_OVERFLOW_CONDITIONAL);
12931 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12934 /* Convert ABS_EXPR<x> < 0 to false. */
12935 strict_overflow_p = false;
12936 if (code == LT_EXPR
12937 && (integer_zerop (arg1) || real_zerop (arg1))
12938 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12940 if (strict_overflow_p)
12941 fold_overflow_warning (("assuming signed overflow does not occur "
12942 "when simplifying comparison of "
12943 "absolute value and zero"),
12944 WARN_STRICT_OVERFLOW_CONDITIONAL);
12945 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12948 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12949 and similarly for >= into !=. */
12950 if ((code == LT_EXPR || code == GE_EXPR)
12951 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12952 && TREE_CODE (arg1) == LSHIFT_EXPR
12953 && integer_onep (TREE_OPERAND (arg1, 0)))
12955 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12956 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12957 TREE_OPERAND (arg1, 1)),
12958 build_int_cst (TREE_TYPE (arg0), 0));
12959 goto fold_binary_exit;
12962 if ((code == LT_EXPR || code == GE_EXPR)
12963 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12964 && CONVERT_EXPR_P (arg1)
12965 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12966 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12968 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12969 fold_convert_loc (loc, TREE_TYPE (arg0),
12970 build2 (RSHIFT_EXPR,
12971 TREE_TYPE (arg0), arg0,
12972 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12974 build_int_cst (TREE_TYPE (arg0), 0));
12975 goto fold_binary_exit;
12980 case UNORDERED_EXPR:
12988 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12990 t1 = fold_relational_const (code, type, arg0, arg1);
12991 if (t1 != NULL_TREE)
12995 /* If the first operand is NaN, the result is constant. */
12996 if (TREE_CODE (arg0) == REAL_CST
12997 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12998 && (code != LTGT_EXPR || ! flag_trapping_math))
13000 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13001 ? integer_zero_node
13002 : integer_one_node;
13003 return omit_one_operand_loc (loc, type, t1, arg1);
13006 /* If the second operand is NaN, the result is constant. */
13007 if (TREE_CODE (arg1) == REAL_CST
13008 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13009 && (code != LTGT_EXPR || ! flag_trapping_math))
13011 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13012 ? integer_zero_node
13013 : integer_one_node;
13014 return omit_one_operand_loc (loc, type, t1, arg0);
13017 /* Simplify unordered comparison of something with itself. */
13018 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13019 && operand_equal_p (arg0, arg1, 0))
13020 return constant_boolean_node (1, type);
13022 if (code == LTGT_EXPR
13023 && !flag_trapping_math
13024 && operand_equal_p (arg0, arg1, 0))
13025 return constant_boolean_node (0, type);
13027 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13029 tree targ0 = strip_float_extensions (arg0);
13030 tree targ1 = strip_float_extensions (arg1);
13031 tree newtype = TREE_TYPE (targ0);
13033 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13034 newtype = TREE_TYPE (targ1);
13036 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13037 return fold_build2_loc (loc, code, type,
13038 fold_convert_loc (loc, newtype, targ0),
13039 fold_convert_loc (loc, newtype, targ1));
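/* Sketch, assuming float f, g: "(double) f < (double) g" folds to
   "f < g".  Widening float to double is exact, so the narrower
   comparison (including its NaN behaviour) gives the same result and
   saves two conversions.  */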
13044 case COMPOUND_EXPR:
13045 /* When pedantic, a compound expression can be neither an lvalue
13046 nor an integer constant expression. */
13047 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13049 /* Don't let (0, 0) be a null pointer constant. */
13050 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13051 : fold_convert_loc (loc, type, arg1);
13052 return pedantic_non_lvalue_loc (loc, tem);
13055 if ((TREE_CODE (arg0) == REAL_CST
13056 && TREE_CODE (arg1) == REAL_CST)
13057 || (TREE_CODE (arg0) == INTEGER_CST
13058 && TREE_CODE (arg1) == INTEGER_CST))
13059 return build_complex (type, arg0, arg1);
13063 /* An ASSERT_EXPR should never be passed to fold_binary. */
13064 gcc_unreachable ();
13068 } /* switch (code) */
13070 protected_set_expr_location (tem, loc);
13074 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13075 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13076 of GOTO_EXPR. */
13078 static tree
13079 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13081 switch (TREE_CODE (*tp))
13087 *walk_subtrees = 0;
13089 /* ... fall through ... */
13096 /* Return whether the sub-tree ST contains a label which is accessible from
13097 outside the sub-tree. */
13099 static bool
13100 contains_label_p (tree st)
13102 return
13103 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13106 /* Fold a ternary expression of code CODE and type TYPE with operands
13107 OP0, OP1, and OP2. Return the folded expression if folding is
13108 successful. Otherwise, return NULL_TREE. */
13111 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13112 tree op0, tree op1, tree op2)
13115 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13116 enum tree_code_class kind = TREE_CODE_CLASS (code);
13118 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13119 && TREE_CODE_LENGTH (code) == 3);
13121 /* Strip any conversions that don't change the mode. This is safe
13122 for every expression, except for a comparison expression because
13123 its signedness is derived from its operands. So, in the latter
13124 case, only strip conversions that don't change the signedness.
13126 Note that this is done as an internal manipulation within the
13127 constant folder, in order to find the simplest representation of
13128 the arguments so that their form can be studied. In any case,
13129 the appropriate type conversions should be put back in the tree
13130 that will get out of the constant folder. */
13145 case COMPONENT_REF:
13146 if (TREE_CODE (arg0) == CONSTRUCTOR
13147 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13149 unsigned HOST_WIDE_INT idx;
13151 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13158 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13159 so all simple results must be passed through pedantic_non_lvalue. */
13160 if (TREE_CODE (arg0) == INTEGER_CST)
13162 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13163 tem = integer_zerop (arg0) ? op2 : op1;
13164 /* Only optimize constant conditions when the selected branch
13165 has the same type as the COND_EXPR. This avoids optimizing
13166 away "c ? x : throw", where the throw has a void type.
13167 Avoid throwing away the unused operand if it contains a label. */
13168 if ((!TREE_SIDE_EFFECTS (unused_op)
13169 || !contains_label_p (unused_op))
13170 && (! VOID_TYPE_P (TREE_TYPE (tem))
13171 || VOID_TYPE_P (type)))
13172 return pedantic_non_lvalue_loc (loc, tem);
13175 if (operand_equal_p (arg1, op2, 0))
13176 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13178 /* If we have A op B ? A : C, we may be able to convert this to a
13179 simpler expression, depending on the operation and the values
13180 of B and C. Signed zeros prevent all of these transformations,
13181 for reasons given above each one.
13183 Also try swapping the arguments and inverting the conditional. */
13184 if (COMPARISON_CLASS_P (arg0)
13185 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13186 arg1, TREE_OPERAND (arg0, 1))
13187 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13189 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13194 if (COMPARISON_CLASS_P (arg0)
13195 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13197 TREE_OPERAND (arg0, 1))
13198 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13200 tem = fold_truth_not_expr (loc, arg0);
13201 if (tem && COMPARISON_CLASS_P (tem))
13203 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13209 /* If the second operand is simpler than the third, swap them
13210 since that produces better jump optimization results. */
13211 if (truth_value_p (TREE_CODE (arg0))
13212 && tree_swap_operands_p (op1, op2, false))
13214 /* See if this can be inverted. If it can't, possibly because
13215 it was a floating-point inequality comparison, don't do
13216 anything. */
13217 tem = fold_truth_not_expr (loc, arg0);
13219 return fold_build3_loc (loc, code, type, tem, op2, op1);
13222 /* Convert A ? 1 : 0 to simply A. */
13223 if (integer_onep (op1)
13224 && integer_zerop (op2)
13225 /* If we try to convert OP0 to our type, the
13226 call to fold will try to move the conversion inside
13227 a COND, which will recurse. In that case, the COND_EXPR
13228 is probably the best choice, so leave it alone. */
13229 && type == TREE_TYPE (arg0))
13230 return pedantic_non_lvalue_loc (loc, arg0);
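/* Example: for an int-typed condition a (matching TYPE as required
   above), "a ? 1 : 0" collapses to plain "a"; the rule just below
   turns "a ? 0 : 1" into "!a".  */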
13232 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13233 over COND_EXPR in cases such as floating point comparisons. */
13234 if (integer_zerop (op1)
13235 && integer_onep (op2)
13236 && truth_value_p (TREE_CODE (arg0)))
13237 return pedantic_non_lvalue_loc (loc,
13238 fold_convert_loc (loc, type,
13239 invert_truthvalue_loc (loc,
13242 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13243 if (TREE_CODE (arg0) == LT_EXPR
13244 && integer_zerop (TREE_OPERAND (arg0, 1))
13245 && integer_zerop (op2)
13246 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13248 /* sign_bit_p only checks ARG1 bits within A's precision.
13249 If <sign bit of A> has wider type than A, bits outside
13250 of A's precision in <sign bit of A> need to be checked.
13251 If they are all 0, this optimization needs to be done
13252 in unsigned A's type; if they are all 1, in signed A's type;
13253 otherwise this can't be done. */
13254 if (TYPE_PRECISION (TREE_TYPE (tem))
13255 < TYPE_PRECISION (TREE_TYPE (arg1))
13256 && TYPE_PRECISION (TREE_TYPE (tem))
13257 < TYPE_PRECISION (type))
13259 unsigned HOST_WIDE_INT mask_lo;
13260 HOST_WIDE_INT mask_hi;
13261 int inner_width, outer_width;
13264 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13265 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13266 if (outer_width > TYPE_PRECISION (type))
13267 outer_width = TYPE_PRECISION (type);
13269 if (outer_width > HOST_BITS_PER_WIDE_INT)
13271 mask_hi = ((unsigned HOST_WIDE_INT) -1
13272 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13278 mask_lo = ((unsigned HOST_WIDE_INT) -1
13279 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13281 if (inner_width > HOST_BITS_PER_WIDE_INT)
13283 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13284 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13288 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13289 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13291 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13292 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13294 tem_type = signed_type_for (TREE_TYPE (tem));
13295 tem = fold_convert_loc (loc, tem_type, tem);
13297 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13298 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13300 tem_type = unsigned_type_for (TREE_TYPE (tem));
13301 tem = fold_convert_loc (loc, tem_type, tem);
13309 fold_convert_loc (loc, type,
13310 fold_build2_loc (loc, BIT_AND_EXPR,
13311 TREE_TYPE (tem), tem,
13312 fold_convert_loc (loc,
13317 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13318 already handled above. */
13319 if (TREE_CODE (arg0) == BIT_AND_EXPR
13320 && integer_onep (TREE_OPERAND (arg0, 1))
13321 && integer_zerop (op2)
13322 && integer_pow2p (arg1))
13324 tree tem = TREE_OPERAND (arg0, 0);
13326 if (TREE_CODE (tem) == RSHIFT_EXPR
13327 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13328 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13329 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13330 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13331 TREE_OPERAND (tem, 0), arg1);
13334 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13335 is probably obsolete because the first operand should be a
13336 truth value (that's why we have the two cases above), but let's
13337 leave it in until we can confirm this for all front-ends. */
13338 if (integer_zerop (op2)
13339 && TREE_CODE (arg0) == NE_EXPR
13340 && integer_zerop (TREE_OPERAND (arg0, 1))
13341 && integer_pow2p (arg1)
13342 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13343 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13344 arg1, OEP_ONLY_CONST))
13345 return pedantic_non_lvalue_loc (loc,
13346 fold_convert_loc (loc, type,
13347 TREE_OPERAND (arg0, 0)));
13349 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13350 if (integer_zerop (op2)
13351 && truth_value_p (TREE_CODE (arg0))
13352 && truth_value_p (TREE_CODE (arg1)))
13353 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13354 fold_convert_loc (loc, type, arg0),
13357 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13358 if (integer_onep (op2)
13359 && truth_value_p (TREE_CODE (arg0))
13360 && truth_value_p (TREE_CODE (arg1)))
13362 /* Only perform transformation if ARG0 is easily inverted. */
13363 tem = fold_truth_not_expr (loc, arg0);
13365 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13366 fold_convert_loc (loc, type, tem),
13370 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13371 if (integer_zerop (arg1)
13372 && truth_value_p (TREE_CODE (arg0))
13373 && truth_value_p (TREE_CODE (op2)))
13375 /* Only perform transformation if ARG0 is easily inverted. */
13376 tem = fold_truth_not_expr (loc, arg0);
13378 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13379 fold_convert_loc (loc, type, tem),
13383 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13384 if (integer_onep (arg1)
13385 && truth_value_p (TREE_CODE (arg0))
13386 && truth_value_p (TREE_CODE (op2)))
13387 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13388 fold_convert_loc (loc, type, arg0),
13394 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13395 of fold_ternary on them. */
13396 gcc_unreachable ();
13398 case BIT_FIELD_REF:
13399 if ((TREE_CODE (arg0) == VECTOR_CST
13400 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13401 && type == TREE_TYPE (TREE_TYPE (arg0)))
13403 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13404 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13407 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13408 && (idx % width) == 0
13409 && (idx = idx / width)
13410 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13412 tree elements = NULL_TREE;
13414 if (TREE_CODE (arg0) == VECTOR_CST)
13415 elements = TREE_VECTOR_CST_ELTS (arg0);
13418 unsigned HOST_WIDE_INT idx;
13421 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13422 elements = tree_cons (NULL_TREE, value, elements);
13424 while (idx-- > 0 && elements)
13425 elements = TREE_CHAIN (elements);
13427 return TREE_VALUE (elements);
13429 return fold_convert_loc (loc, type, integer_zero_node);
13433 /* A bit-field-ref that referenced the full argument can be stripped. */
13434 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13435 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13436 && integer_zerop (op2))
13437 return fold_convert_loc (loc, type, arg0);
13443 } /* switch (code) */
13446 /* Perform constant folding and related simplification of EXPR.
13447 The related simplifications include x*1 => x, x*0 => 0, etc.,
13448 and application of the associative law.
13449 NOP_EXPR conversions may be removed freely (as long as we
13450 are careful not to change the type of the overall expression).
13451 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13452 but we can constant-fold them if they have constant operands. */
13454 #ifdef ENABLE_FOLD_CHECKING
13455 # define fold(x) fold_1 (x)
13456 static tree fold_1 (tree);
13462 const tree t = expr;
13463 enum tree_code code = TREE_CODE (t);
13464 enum tree_code_class kind = TREE_CODE_CLASS (code);
13466 location_t loc = EXPR_LOCATION (expr);
13468 /* Return right away if a constant. */
13469 if (kind == tcc_constant)
13472 /* CALL_EXPR-like objects with variable numbers of operands are
13473 treated specially. */
13474 if (kind == tcc_vl_exp)
13476 if (code == CALL_EXPR)
13478 tem = fold_call_expr (loc, expr, false);
13479 return tem ? tem : expr;
13484 if (IS_EXPR_CODE_CLASS (kind))
13486 tree type = TREE_TYPE (t);
13487 tree op0, op1, op2;
13489 switch (TREE_CODE_LENGTH (code))
13492 op0 = TREE_OPERAND (t, 0);
13493 tem = fold_unary_loc (loc, code, type, op0);
13494 return tem ? tem : expr;
13496 op0 = TREE_OPERAND (t, 0);
13497 op1 = TREE_OPERAND (t, 1);
13498 tem = fold_binary_loc (loc, code, type, op0, op1);
13499 return tem ? tem : expr;
13501 op0 = TREE_OPERAND (t, 0);
13502 op1 = TREE_OPERAND (t, 1);
13503 op2 = TREE_OPERAND (t, 2);
13504 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13505 return tem ? tem : expr;
13515 tree op0 = TREE_OPERAND (t, 0);
13516 tree op1 = TREE_OPERAND (t, 1);
13518 if (TREE_CODE (op1) == INTEGER_CST
13519 && TREE_CODE (op0) == CONSTRUCTOR
13520 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13522 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13523 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13524 unsigned HOST_WIDE_INT begin = 0;
13526 /* Find a matching index by means of a binary search. */
13527 while (begin != end)
13529 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13530 tree index = VEC_index (constructor_elt, elts, middle)->index;
13532 if (TREE_CODE (index) == INTEGER_CST
13533 && tree_int_cst_lt (index, op1))
13534 begin = middle + 1;
13535 else if (TREE_CODE (index) == INTEGER_CST
13536 && tree_int_cst_lt (op1, index))
13538 else if (TREE_CODE (index) == RANGE_EXPR
13539 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13540 begin = middle + 1;
13541 else if (TREE_CODE (index) == RANGE_EXPR
13542 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13545 return VEC_index (constructor_elt, elts, middle)->value;
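/* For instance, given "static const int a[] = {10, 20, 30};", folding
   the ARRAY_REF a[1] binary-searches the CONSTRUCTOR's index list and
   yields the INTEGER_CST 20.  */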
13553 return fold (DECL_INITIAL (t));
13557 } /* switch (code) */
13560 #ifdef ENABLE_FOLD_CHECKING
13563 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13564 static void fold_check_failed (const_tree, const_tree);
13565 void print_fold_checksum (const_tree);
13567 /* When --enable-checking=fold, compute a digest of expr before
13568 and after the actual fold call, to verify that fold did not
13569 accidentally change the original expr.  */
13575 struct md5_ctx ctx;
13576 unsigned char checksum_before[16], checksum_after[16];
13579 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13580 md5_init_ctx (&ctx);
13581 fold_checksum_tree (expr, &ctx, ht);
13582 md5_finish_ctx (&ctx, checksum_before);
13585 ret = fold_1 (expr);
13587 md5_init_ctx (&ctx);
13588 fold_checksum_tree (expr, &ctx, ht);
13589 md5_finish_ctx (&ctx, checksum_after);
13592 if (memcmp (checksum_before, checksum_after, 16))
13593 fold_check_failed (expr, ret);
13599 print_fold_checksum (const_tree expr)
13601 struct md5_ctx ctx;
13602 unsigned char checksum[16], cnt;
13605 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13606 md5_init_ctx (&ctx);
13607 fold_checksum_tree (expr, &ctx, ht);
13608 md5_finish_ctx (&ctx, checksum);
13610 for (cnt = 0; cnt < 16; ++cnt)
13611 fprintf (stderr, "%02x", checksum[cnt]);
13612 putc ('\n', stderr);
13616 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13618 internal_error ("fold check: original tree changed by fold");
13622 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13625 enum tree_code code;
13626 union tree_node buf;
13631 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13632 <= sizeof (struct tree_function_decl))
13633 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13636 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13640 code = TREE_CODE (expr);
13641 if (TREE_CODE_CLASS (code) == tcc_declaration
13642 && DECL_ASSEMBLER_NAME_SET_P (expr))
13644 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13645 memcpy ((char *) &buf, expr, tree_size (expr));
13646 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13647 expr = (tree) &buf;
13649 else if (TREE_CODE_CLASS (code) == tcc_type
13650 && (TYPE_POINTER_TO (expr)
13651 || TYPE_REFERENCE_TO (expr)
13652 || TYPE_CACHED_VALUES_P (expr)
13653 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13654 || TYPE_NEXT_VARIANT (expr)))
13656 /* Allow these fields to be modified. */
13658 memcpy ((char *) &buf, expr, tree_size (expr));
13659 expr = tmp = (tree) &buf;
13660 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13661 TYPE_POINTER_TO (tmp) = NULL;
13662 TYPE_REFERENCE_TO (tmp) = NULL;
13663 TYPE_NEXT_VARIANT (tmp) = NULL;
13664 if (TYPE_CACHED_VALUES_P (tmp))
13666 TYPE_CACHED_VALUES_P (tmp) = 0;
13667 TYPE_CACHED_VALUES (tmp) = NULL;
13670 md5_process_bytes (expr, tree_size (expr), ctx);
13671 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13672 if (TREE_CODE_CLASS (code) != tcc_type
13673 && TREE_CODE_CLASS (code) != tcc_declaration
13674 && code != TREE_LIST
13675 && code != SSA_NAME)
13676 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13677 switch (TREE_CODE_CLASS (code))
13683 md5_process_bytes (TREE_STRING_POINTER (expr),
13684 TREE_STRING_LENGTH (expr), ctx);
13687 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13688 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13691 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13697 case tcc_exceptional:
13701 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13702 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13703 expr = TREE_CHAIN (expr);
13704 goto recursive_label;
13707 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13708 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13714 case tcc_expression:
13715 case tcc_reference:
13716 case tcc_comparison:
13719 case tcc_statement:
13721 len = TREE_OPERAND_LENGTH (expr);
13722 for (i = 0; i < len; ++i)
13723 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13725 case tcc_declaration:
13726 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13727 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13728 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13730 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13731 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13732 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13733 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13734 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13736 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13737 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13739 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13741 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13742 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13743 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13747 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13748 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13749 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13750 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13751 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13752 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13753 if (INTEGRAL_TYPE_P (expr)
13754 || SCALAR_FLOAT_TYPE_P (expr))
13756 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13757 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13759 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13760 if (TREE_CODE (expr) == RECORD_TYPE
13761 || TREE_CODE (expr) == UNION_TYPE
13762 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13763 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13764 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13771 /* Helper function for outputting the checksum of a tree T. When
13772 debugging with gdb, you can "define mynext" to be "next" followed
13773 by "call debug_fold_checksum (op0)", then just trace down till the
13776 DEBUG_FUNCTION void
13777 debug_fold_checksum (const_tree t)
13780 unsigned char checksum[16];
13781 struct md5_ctx ctx;
13782 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13784 md5_init_ctx (&ctx);
13785 fold_checksum_tree (t, &ctx, ht);
13786 md5_finish_ctx (&ctx, checksum);
13789 for (i = 0; i < 16; i++)
13790 fprintf (stderr, "%d ", checksum[i]);
13792 fprintf (stderr, "\n");
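/* A typical session using the recipe described above (illustrative):

     (gdb) define mynext
     > next
     > call debug_fold_checksum (op0)
     > end

   Stepping with "mynext" then prints the digest after every statement;
   the first step at which it changes is the one that modified OP0.  */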
13797 /* Fold a unary tree expression with code CODE of type TYPE with an
13798 operand OP0. LOC is the location of the resulting expression.
13799 Return a folded expression if successful. Otherwise, return a tree
13800 expression with code CODE of type TYPE with an operand OP0. */
13803 fold_build1_stat_loc (location_t loc,
13804 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13807 #ifdef ENABLE_FOLD_CHECKING
13808 unsigned char checksum_before[16], checksum_after[16];
13809 struct md5_ctx ctx;
13812 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13813 md5_init_ctx (&ctx);
13814 fold_checksum_tree (op0, &ctx, ht);
13815 md5_finish_ctx (&ctx, checksum_before);
13819 tem = fold_unary_loc (loc, code, type, op0);
13822 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13823 SET_EXPR_LOCATION (tem, loc);
13826 #ifdef ENABLE_FOLD_CHECKING
13827 md5_init_ctx (&ctx);
13828 fold_checksum_tree (op0, &ctx, ht);
13829 md5_finish_ctx (&ctx, checksum_after);
13832 if (memcmp (checksum_before, checksum_after, 16))
13833 fold_check_failed (op0, tem);
13838 /* Fold a binary tree expression with code CODE of type TYPE with
13839 operands OP0 and OP1. LOC is the location of the resulting
13840 expression. Return a folded expression if successful. Otherwise,
13841 return a tree expression with code CODE of type TYPE with operands
13845 fold_build2_stat_loc (location_t loc,
13846 enum tree_code code, tree type, tree op0, tree op1
13850 #ifdef ENABLE_FOLD_CHECKING
13851 unsigned char checksum_before_op0[16],
13852 checksum_before_op1[16],
13853 checksum_after_op0[16],
13854 checksum_after_op1[16];
13855 struct md5_ctx ctx;
13858 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13859 md5_init_ctx (&ctx);
13860 fold_checksum_tree (op0, &ctx, ht);
13861 md5_finish_ctx (&ctx, checksum_before_op0);
13864 md5_init_ctx (&ctx);
13865 fold_checksum_tree (op1, &ctx, ht);
13866 md5_finish_ctx (&ctx, checksum_before_op1);
13870 tem = fold_binary_loc (loc, code, type, op0, op1);
13873 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13874 SET_EXPR_LOCATION (tem, loc);
13877 #ifdef ENABLE_FOLD_CHECKING
13878 md5_init_ctx (&ctx);
13879 fold_checksum_tree (op0, &ctx, ht);
13880 md5_finish_ctx (&ctx, checksum_after_op0);
13883 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13884 fold_check_failed (op0, tem);
13886 md5_init_ctx (&ctx);
13887 fold_checksum_tree (op1, &ctx, ht);
13888 md5_finish_ctx (&ctx, checksum_after_op1);
13891 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13892 fold_check_failed (op1, tem);
13897 /* Fold a ternary tree expression with code CODE of type TYPE with
13898 operands OP0, OP1, and OP2. Return a folded expression if
13899 successful. Otherwise, return a tree expression with code CODE of
13900 type TYPE with operands OP0, OP1, and OP2. */
13903 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
13904 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13907 #ifdef ENABLE_FOLD_CHECKING
13908 unsigned char checksum_before_op0[16],
13909 checksum_before_op1[16],
13910 checksum_before_op2[16],
13911 checksum_after_op0[16],
13912 checksum_after_op1[16],
13913 checksum_after_op2[16];
13914 struct md5_ctx ctx;
13917 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13918 md5_init_ctx (&ctx);
13919 fold_checksum_tree (op0, &ctx, ht);
13920 md5_finish_ctx (&ctx, checksum_before_op0);
13923 md5_init_ctx (&ctx);
13924 fold_checksum_tree (op1, &ctx, ht);
13925 md5_finish_ctx (&ctx, checksum_before_op1);
13928 md5_init_ctx (&ctx);
13929 fold_checksum_tree (op2, &ctx, ht);
13930 md5_finish_ctx (&ctx, checksum_before_op2);
13934 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13935 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13938 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13939 SET_EXPR_LOCATION (tem, loc);
13942 #ifdef ENABLE_FOLD_CHECKING
13943 md5_init_ctx (&ctx);
13944 fold_checksum_tree (op0, &ctx, ht);
13945 md5_finish_ctx (&ctx, checksum_after_op0);
13948 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13949 fold_check_failed (op0, tem);
13951 md5_init_ctx (&ctx);
13952 fold_checksum_tree (op1, &ctx, ht);
13953 md5_finish_ctx (&ctx, checksum_after_op1);
13956 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13957 fold_check_failed (op1, tem);
13959 md5_init_ctx (&ctx);
13960 fold_checksum_tree (op2, &ctx, ht);
13961 md5_finish_ctx (&ctx, checksum_after_op2);
13964 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13965 fold_check_failed (op2, tem);
13970 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13971 arguments in ARGARRAY, and a null static chain.
13972 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13973 of type TYPE from the given operands as constructed by build_call_array. */
13976 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13977 int nargs, tree *argarray)
13980 #ifdef ENABLE_FOLD_CHECKING
13981 unsigned char checksum_before_fn[16],
13982 checksum_before_arglist[16],
13983 checksum_after_fn[16],
13984 checksum_after_arglist[16];
13985 struct md5_ctx ctx;
13989 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13990 md5_init_ctx (&ctx);
13991 fold_checksum_tree (fn, &ctx, ht);
13992 md5_finish_ctx (&ctx, checksum_before_fn);
13995 md5_init_ctx (&ctx);
13996 for (i = 0; i < nargs; i++)
13997 fold_checksum_tree (argarray[i], &ctx, ht);
13998 md5_finish_ctx (&ctx, checksum_before_arglist);
14002 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14004 #ifdef ENABLE_FOLD_CHECKING
14005 md5_init_ctx (&ctx);
14006 fold_checksum_tree (fn, &ctx, ht);
14007 md5_finish_ctx (&ctx, checksum_after_fn);
14010 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14011 fold_check_failed (fn, tem);
14013 md5_init_ctx (&ctx);
14014 for (i = 0; i < nargs; i++)
14015 fold_checksum_tree (argarray[i], &ctx, ht);
14016 md5_finish_ctx (&ctx, checksum_after_arglist);
14019 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14020 fold_check_failed (NULL_TREE, tem);
14025 /* Perform constant folding and related simplification of initializer
14026 expression EXPR. These behave identically to "fold_buildN" but ignore
14027 potential run-time traps and exceptions that fold must preserve. */
14029 #define START_FOLD_INIT \
14030 int saved_signaling_nans = flag_signaling_nans;\
14031 int saved_trapping_math = flag_trapping_math;\
14032 int saved_rounding_math = flag_rounding_math;\
14033 int saved_trapv = flag_trapv;\
14034 int saved_folding_initializer = folding_initializer;\
14035 flag_signaling_nans = 0;\
14036 flag_trapping_math = 0;\
14037 flag_rounding_math = 0;\
14039 folding_initializer = 1;
14041 #define END_FOLD_INIT \
14042 flag_signaling_nans = saved_signaling_nans;\
14043 flag_trapping_math = saved_trapping_math;\
14044 flag_rounding_math = saved_rounding_math;\
14045 flag_trapv = saved_trapv;\
14046 folding_initializer = saved_folding_initializer;
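/* Each fold_buildN_initializer_loc function below is a thin wrapper
   following the same pattern (sketch):

     tree result;
     START_FOLD_INIT;
     result = fold_buildN_loc (loc, ...);
     END_FOLD_INIT;
     return result;  */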
14049 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14050 tree type, tree op)
14055 result = fold_build1_loc (loc, code, type, op);
14062 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14063 tree type, tree op0, tree op1)
14068 result = fold_build2_loc (loc, code, type, op0, op1);
14075 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14076 tree type, tree op0, tree op1, tree op2)
14081 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14088 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14089 int nargs, tree *argarray)
14094 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14100 #undef START_FOLD_INIT
14101 #undef END_FOLD_INIT
14103 /* Determine if the first argument is a multiple of the second argument.  Return 0 if
14104 it is not, or if we cannot easily determine it to be one.
14106 An example of the sort of thing we care about (at this point; this routine
14107 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14108 fold cases do now) is discovering that
14110 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14116 is a multiple of SAVE_EXPR (J * 8), when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14118 This code also handles discovering that
14120 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14122 is a multiple of 8 so we don't have to worry about dealing with a
14123 possible remainder.
14125 Note that we *look* inside a SAVE_EXPR only to determine how it was
14126 calculated; it is not safe for fold to do much of anything else with the
14127 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14128 at run time. For example, the latter example above *cannot* be implemented
14129 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14130 evaluation time of the original SAVE_EXPR is not necessarily the same at
14131 the time the new expression is evaluated. The only optimization of this
14132 sort that would be valid is changing
14134 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14138 divided by 8 to SAVE_EXPR (I) * SAVE_EXPR (J)
14140 (where the same SAVE_EXPR (J) is used in the original and the
14141 transformed version). */
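/* As a concrete illustration: with TOP = J * 8 and BOTTOM = 8, the
   MULT_EXPR case below succeeds because the constant factor 8 is itself
   a multiple of 8; with TOP = (J + 1) * 4 and BOTTOM = 8 it returns 0,
   since neither factor is known to be a multiple of 8.  */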
14144 multiple_of_p (tree type, const_tree top, const_tree bottom)
14146 if (operand_equal_p (top, bottom, 0))
14149 if (TREE_CODE (type) != INTEGER_TYPE)
14152 switch (TREE_CODE (top))
14155 /* Bitwise and provides a power of two multiple. If the mask is
14156 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14157 if (!integer_pow2p (bottom))
14162 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14163 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14167 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14168 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14171 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14175 op1 = TREE_OPERAND (top, 1);
14176 /* const_binop may not detect overflow correctly,
14177 so check for it explicitly here. */
14178 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14179 > TREE_INT_CST_LOW (op1)
14180 && TREE_INT_CST_HIGH (op1) == 0
14181 && 0 != (t1 = fold_convert (type,
14182 const_binop (LSHIFT_EXPR,
14185 && !TREE_OVERFLOW (t1))
14186 return multiple_of_p (type, t1, bottom);
14191 /* Can't handle conversions from non-integral or wider integral type. */
14192 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14193 || (TYPE_PRECISION (type)
14194 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14197 /* ... fall through ...  */
14200 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14203 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14204 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14207 if (TREE_CODE (bottom) != INTEGER_CST
14208 || integer_zerop (bottom)
14209 || (TYPE_UNSIGNED (type)
14210 && (tree_int_cst_sgn (top) < 0
14211 || tree_int_cst_sgn (bottom) < 0)))
14213 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14221 /* Return true if CODE or TYPE is known to be non-negative. */
14224 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14226 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14227 && truth_value_p (code))
14228 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14229 have a signed:1 type (where the values are -1 and 0).  */
14234 /* Return true if (CODE OP0) is known to be non-negative. If the return
14235 value is based on the assumption that signed overflow is undefined,
14236 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14237 *STRICT_OVERFLOW_P. */
14240 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14241 bool *strict_overflow_p)
14243 if (TYPE_UNSIGNED (type))
14249 /* We can't return 1 if flag_wrapv is set because
14250 ABS_EXPR<INT_MIN> = INT_MIN. */
14251 if (!INTEGRAL_TYPE_P (type))
14253 if (TYPE_OVERFLOW_UNDEFINED (type))
14255 *strict_overflow_p = true;
14260 case NON_LVALUE_EXPR:
14262 case FIX_TRUNC_EXPR:
14263 return tree_expr_nonnegative_warnv_p (op0,
14264 strict_overflow_p);
14268 tree inner_type = TREE_TYPE (op0);
14269 tree outer_type = type;
14271 if (TREE_CODE (outer_type) == REAL_TYPE)
14273 if (TREE_CODE (inner_type) == REAL_TYPE)
14274 return tree_expr_nonnegative_warnv_p (op0,
14275 strict_overflow_p);
14276 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14278 if (TYPE_UNSIGNED (inner_type))
14280 return tree_expr_nonnegative_warnv_p (op0,
14281 strict_overflow_p);
14284 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14286 if (TREE_CODE (inner_type) == REAL_TYPE)
14287 return tree_expr_nonnegative_warnv_p (op0,
14288 strict_overflow_p);
14289 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14290 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14291 && TYPE_UNSIGNED (inner_type);
14297 return tree_simple_nonnegative_warnv_p (code, type);
14300 /* We don't know sign of `t', so be conservative and return false. */
14304 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14305 value is based on the assumption that signed overflow is undefined,
14306 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14307 *STRICT_OVERFLOW_P. */
14310 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14311 tree op1, bool *strict_overflow_p)
14313 if (TYPE_UNSIGNED (type))
14318 case POINTER_PLUS_EXPR:
14320 if (FLOAT_TYPE_P (type))
14321 return (tree_expr_nonnegative_warnv_p (op0,
14323 && tree_expr_nonnegative_warnv_p (op1,
14324 strict_overflow_p));
14326 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14327 both unsigned and at least 2 bits shorter than the result. */
14328 if (TREE_CODE (type) == INTEGER_TYPE
14329 && TREE_CODE (op0) == NOP_EXPR
14330 && TREE_CODE (op1) == NOP_EXPR)
14332 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14333 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14334 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14335 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14337 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14338 TYPE_PRECISION (inner2)) + 1;
14339 return prec < TYPE_PRECISION (type);
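/* For instance, (int) (unsigned char) x + (int) (unsigned char) y
   needs at most MAX (8, 8) + 1 = 9 bits; with a 32-bit int the sum
   is at most 510, so it is certainly nonnegative.  */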
14345 if (FLOAT_TYPE_P (type))
14347 /* x * x for floating point x is always non-negative. */
14348 if (operand_equal_p (op0, op1, 0))
14350 return (tree_expr_nonnegative_warnv_p (op0,
14352 && tree_expr_nonnegative_warnv_p (op1,
14353 strict_overflow_p));
14356 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14357 both unsigned and their combined width is less than that of the result.  */
14358 if (TREE_CODE (type) == INTEGER_TYPE
14359 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14360 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14362 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14363 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14365 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14366 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14369 bool unsigned0 = TYPE_UNSIGNED (inner0);
14370 bool unsigned1 = TYPE_UNSIGNED (inner1);
14372 if (TREE_CODE (op0) == INTEGER_CST)
14373 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14375 if (TREE_CODE (op1) == INTEGER_CST)
14376 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14378 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14379 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14381 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14382 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14383 : TYPE_PRECISION (inner0);
14385 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14386 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14387 : TYPE_PRECISION (inner1);
14389 return precision0 + precision1 < TYPE_PRECISION (type);
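/* E.g. (int) (unsigned char) x * (int) (unsigned char) y needs at
   most 8 + 8 = 16 bits; with a 32-bit int the product is at most
   65025, hence nonnegative.  */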
14396 return (tree_expr_nonnegative_warnv_p (op0,
14398 || tree_expr_nonnegative_warnv_p (op1,
14399 strict_overflow_p));
14405 case TRUNC_DIV_EXPR:
14406 case CEIL_DIV_EXPR:
14407 case FLOOR_DIV_EXPR:
14408 case ROUND_DIV_EXPR:
14409 return (tree_expr_nonnegative_warnv_p (op0,
14411 && tree_expr_nonnegative_warnv_p (op1,
14412 strict_overflow_p));
14414 case TRUNC_MOD_EXPR:
14415 case CEIL_MOD_EXPR:
14416 case FLOOR_MOD_EXPR:
14417 case ROUND_MOD_EXPR:
14418 return tree_expr_nonnegative_warnv_p (op0,
14419 strict_overflow_p);
14421 return tree_simple_nonnegative_warnv_p (code, type);
14424 /* We don't know sign of `t', so be conservative and return false. */
14428 /* Return true if T is known to be non-negative. If the return
14429 value is based on the assumption that signed overflow is undefined,
14430 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14431 *STRICT_OVERFLOW_P. */
14434 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14436 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14439 switch (TREE_CODE (t))
14442 return tree_int_cst_sgn (t) >= 0;
14445 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14448 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14451 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14453 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14454 strict_overflow_p));
14456 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14459 /* We don't know sign of `t', so be conservative and return false. */
14463 /* Return true if T is known to be non-negative. If the return
14464 value is based on the assumption that signed overflow is undefined,
14465 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14466 *STRICT_OVERFLOW_P. */
14469 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14470 tree arg0, tree arg1, bool *strict_overflow_p)
14472 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14473 switch (DECL_FUNCTION_CODE (fndecl))
14475 CASE_FLT_FN (BUILT_IN_ACOS):
14476 CASE_FLT_FN (BUILT_IN_ACOSH):
14477 CASE_FLT_FN (BUILT_IN_CABS):
14478 CASE_FLT_FN (BUILT_IN_COSH):
14479 CASE_FLT_FN (BUILT_IN_ERFC):
14480 CASE_FLT_FN (BUILT_IN_EXP):
14481 CASE_FLT_FN (BUILT_IN_EXP10):
14482 CASE_FLT_FN (BUILT_IN_EXP2):
14483 CASE_FLT_FN (BUILT_IN_FABS):
14484 CASE_FLT_FN (BUILT_IN_FDIM):
14485 CASE_FLT_FN (BUILT_IN_HYPOT):
14486 CASE_FLT_FN (BUILT_IN_POW10):
14487 CASE_INT_FN (BUILT_IN_FFS):
14488 CASE_INT_FN (BUILT_IN_PARITY):
14489 CASE_INT_FN (BUILT_IN_POPCOUNT):
14490 case BUILT_IN_BSWAP32:
14491 case BUILT_IN_BSWAP64:
14495 CASE_FLT_FN (BUILT_IN_SQRT):
14496 /* sqrt(-0.0) is -0.0. */
14497 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14499 return tree_expr_nonnegative_warnv_p (arg0,
14500 strict_overflow_p);
14502 CASE_FLT_FN (BUILT_IN_ASINH):
14503 CASE_FLT_FN (BUILT_IN_ATAN):
14504 CASE_FLT_FN (BUILT_IN_ATANH):
14505 CASE_FLT_FN (BUILT_IN_CBRT):
14506 CASE_FLT_FN (BUILT_IN_CEIL):
14507 CASE_FLT_FN (BUILT_IN_ERF):
14508 CASE_FLT_FN (BUILT_IN_EXPM1):
14509 CASE_FLT_FN (BUILT_IN_FLOOR):
14510 CASE_FLT_FN (BUILT_IN_FMOD):
14511 CASE_FLT_FN (BUILT_IN_FREXP):
14512 CASE_FLT_FN (BUILT_IN_LCEIL):
14513 CASE_FLT_FN (BUILT_IN_LDEXP):
14514 CASE_FLT_FN (BUILT_IN_LFLOOR):
14515 CASE_FLT_FN (BUILT_IN_LLCEIL):
14516 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14517 CASE_FLT_FN (BUILT_IN_LLRINT):
14518 CASE_FLT_FN (BUILT_IN_LLROUND):
14519 CASE_FLT_FN (BUILT_IN_LRINT):
14520 CASE_FLT_FN (BUILT_IN_LROUND):
14521 CASE_FLT_FN (BUILT_IN_MODF):
14522 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14523 CASE_FLT_FN (BUILT_IN_RINT):
14524 CASE_FLT_FN (BUILT_IN_ROUND):
14525 CASE_FLT_FN (BUILT_IN_SCALB):
14526 CASE_FLT_FN (BUILT_IN_SCALBLN):
14527 CASE_FLT_FN (BUILT_IN_SCALBN):
14528 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14529 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14530 CASE_FLT_FN (BUILT_IN_SINH):
14531 CASE_FLT_FN (BUILT_IN_TANH):
14532 CASE_FLT_FN (BUILT_IN_TRUNC):
14533 /* True if the 1st argument is nonnegative. */
14534 return tree_expr_nonnegative_warnv_p (arg0,
14535 strict_overflow_p);
14537 CASE_FLT_FN (BUILT_IN_FMAX):
14538 /* True if the 1st OR 2nd arguments are nonnegative. */
14539 return (tree_expr_nonnegative_warnv_p (arg0,
14541 || (tree_expr_nonnegative_warnv_p (arg1,
14542 strict_overflow_p)));
14544 CASE_FLT_FN (BUILT_IN_FMIN):
14545 /* True if the 1st AND 2nd arguments are nonnegative. */
14546 return (tree_expr_nonnegative_warnv_p (arg0,
14548 && (tree_expr_nonnegative_warnv_p (arg1,
14549 strict_overflow_p)));
14551 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14552 /* True if the 2nd argument is nonnegative. */
14553 return tree_expr_nonnegative_warnv_p (arg1,
14554 strict_overflow_p);
14556 CASE_FLT_FN (BUILT_IN_POWI):
14557 /* True if the 1st argument is nonnegative or the second
14558 argument is an even integer. */
14559 if (TREE_CODE (arg1) == INTEGER_CST
14560 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14562 return tree_expr_nonnegative_warnv_p (arg0,
14563 strict_overflow_p);
14565 CASE_FLT_FN (BUILT_IN_POW):
14566 /* True if the 1st argument is nonnegative or the second
14567 argument is an even integer-valued real.  */
14568 if (TREE_CODE (arg1) == REAL_CST)
14573 c = TREE_REAL_CST (arg1);
14574 n = real_to_integer (&c);
14577 REAL_VALUE_TYPE cint;
14578 real_from_integer (&cint, VOIDmode, n,
14579 n < 0 ? -1 : 0, 0);
14580 if (real_identical (&c, &cint))
14584 return tree_expr_nonnegative_warnv_p (arg0,
14585 strict_overflow_p);
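/* Thus pow (x, 2.0) is nonnegative for every x, since 2.0 is an even
   integer-valued real, while pow (x, 2.5) is nonnegative only when x
   itself is known to be nonnegative.  */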
14590 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14594 /* Return true if T is known to be non-negative. If the return
14595 value is based on the assumption that signed overflow is undefined,
14596 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14597 *STRICT_OVERFLOW_P. */
14600 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14602 enum tree_code code = TREE_CODE (t);
14603 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14610 tree temp = TARGET_EXPR_SLOT (t);
14611 t = TARGET_EXPR_INITIAL (t);
14613 /* If the initializer is non-void, then it's a normal expression
14614 that will be assigned to the slot. */
14615 if (!VOID_TYPE_P (t))
14616 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14618 /* Otherwise, the initializer sets the slot in some way. One common
14619 way is an assignment statement at the end of the initializer. */
14622 if (TREE_CODE (t) == BIND_EXPR)
14623 t = expr_last (BIND_EXPR_BODY (t));
14624 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14625 || TREE_CODE (t) == TRY_CATCH_EXPR)
14626 t = expr_last (TREE_OPERAND (t, 0));
14627 else if (TREE_CODE (t) == STATEMENT_LIST)
14632 if (TREE_CODE (t) == MODIFY_EXPR
14633 && TREE_OPERAND (t, 0) == temp)
14634 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14635 strict_overflow_p);
14642 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14643 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14645 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14646 get_callee_fndecl (t),
14649 strict_overflow_p);
14651 case COMPOUND_EXPR:
14653 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14654 strict_overflow_p);
14656 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14657 strict_overflow_p);
14659 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14660 strict_overflow_p);
14663 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14667 /* We don't know sign of `t', so be conservative and return false. */
14671 /* Return true if T is known to be non-negative. If the return
14672 value is based on the assumption that signed overflow is undefined,
14673 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14674 *STRICT_OVERFLOW_P. */
14677 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14679 enum tree_code code;
14680 if (t == error_mark_node)
14683 code = TREE_CODE (t);
14684 switch (TREE_CODE_CLASS (code))
14687 case tcc_comparison:
14688 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14690 TREE_OPERAND (t, 0),
14691 TREE_OPERAND (t, 1),
14692 strict_overflow_p);
14695 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14697 TREE_OPERAND (t, 0),
14698 strict_overflow_p);
14701 case tcc_declaration:
14702 case tcc_reference:
14703 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14711 case TRUTH_AND_EXPR:
14712 case TRUTH_OR_EXPR:
14713 case TRUTH_XOR_EXPR:
14714 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14716 TREE_OPERAND (t, 0),
14717 TREE_OPERAND (t, 1),
14718 strict_overflow_p);
14719 case TRUTH_NOT_EXPR:
14720 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14722 TREE_OPERAND (t, 0),
14723 strict_overflow_p);
14730 case WITH_SIZE_EXPR:
14732 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14735 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14739 /* Return true if `t' is known to be non-negative. Handle warnings
14740 about undefined signed overflow. */
14743 tree_expr_nonnegative_p (tree t)
14745 bool ret, strict_overflow_p;
14747 strict_overflow_p = false;
14748 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14749 if (strict_overflow_p)
14750 fold_overflow_warning (("assuming signed overflow does not occur when "
14751 "determining that expression is always "
14753 WARN_STRICT_OVERFLOW_MISC);
14758 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14759 For floating point we further ensure that T is not denormal.
14760 Similar logic is present in nonzero_address in rtlanal.h.
14762 If the return value is based on the assumption that signed overflow
14763 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14764 change *STRICT_OVERFLOW_P. */
14767 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14768 bool *strict_overflow_p)
14773 return tree_expr_nonzero_warnv_p (op0,
14774 strict_overflow_p);
14778 tree inner_type = TREE_TYPE (op0);
14779 tree outer_type = type;
14781 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14782 && tree_expr_nonzero_warnv_p (op0,
14783 strict_overflow_p));
14787 case NON_LVALUE_EXPR:
14788 return tree_expr_nonzero_warnv_p (op0,
14789 strict_overflow_p);
14798 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14799 For floating point we further ensure that T is not denormal.
14800 Similar logic is present in nonzero_address in rtlanal.h.
14802 If the return value is based on the assumption that signed overflow
14803 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14804 change *STRICT_OVERFLOW_P. */
14807 tree_binary_nonzero_warnv_p (enum tree_code code,
14810 tree op1, bool *strict_overflow_p)
14812 bool sub_strict_overflow_p;
14815 case POINTER_PLUS_EXPR:
14817 if (TYPE_OVERFLOW_UNDEFINED (type))
14819 /* In the presence of negative values it is hard
14820 to say anything definite.  */
14821 sub_strict_overflow_p = false;
14822 if (!tree_expr_nonnegative_warnv_p (op0,
14823 &sub_strict_overflow_p)
14824 || !tree_expr_nonnegative_warnv_p (op1,
14825 &sub_strict_overflow_p))
14827 /* One of the operands must be positive and the other non-negative.  */
14828 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14829 overflows, on a twos-complement machine the sum of two
14830 nonnegative numbers can never be zero. */
14831 return (tree_expr_nonzero_warnv_p (op0,
14833 || tree_expr_nonzero_warnv_p (op1,
14834 strict_overflow_p));
14839 if (TYPE_OVERFLOW_UNDEFINED (type))
14841 if (tree_expr_nonzero_warnv_p (op0,
14843 && tree_expr_nonzero_warnv_p (op1,
14844 strict_overflow_p))
14846 *strict_overflow_p = true;
14853 sub_strict_overflow_p = false;
14854 if (tree_expr_nonzero_warnv_p (op0,
14855 &sub_strict_overflow_p)
14856 && tree_expr_nonzero_warnv_p (op1,
14857 &sub_strict_overflow_p))
14859 if (sub_strict_overflow_p)
14860 *strict_overflow_p = true;
14865 sub_strict_overflow_p = false;
14866 if (tree_expr_nonzero_warnv_p (op0,
14867 &sub_strict_overflow_p))
14869 if (sub_strict_overflow_p)
14870 *strict_overflow_p = true;
14872 /* When both operands are nonzero, then MAX must be too. */
14873 if (tree_expr_nonzero_warnv_p (op1,
14874 strict_overflow_p))
14877 /* MAX where operand 0 is positive is positive. */
14878 return tree_expr_nonnegative_warnv_p (op0,
14879 strict_overflow_p);
14881 /* MAX where operand 1 is positive is positive. */
14882 else if (tree_expr_nonzero_warnv_p (op1,
14883 &sub_strict_overflow_p)
14884 && tree_expr_nonnegative_warnv_p (op1,
14885 &sub_strict_overflow_p))
14887 if (sub_strict_overflow_p)
14888 *strict_overflow_p = true;
14894 return (tree_expr_nonzero_warnv_p (op1,
14896 || tree_expr_nonzero_warnv_p (op0,
14897 strict_overflow_p));
14906 /* Return true when T is an address and is known to be nonzero.
14907 For floating point we further ensure that T is not denormal.
14908 Similar logic is present in nonzero_address in rtlanal.h.
14910 If the return value is based on the assumption that signed overflow
14911 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14912 change *STRICT_OVERFLOW_P. */
14915 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14917 bool sub_strict_overflow_p;
14918 switch (TREE_CODE (t))
14921 return !integer_zerop (t);
14925 tree base = TREE_OPERAND (t, 0);
14926 if (!DECL_P (base))
14927 base = get_base_address (base);
14932 /* Weak declarations may link to NULL. Other things may also be NULL
14933 so protect with -fdelete-null-pointer-checks; but not variables
14934 allocated on the stack. */
14936 && (flag_delete_null_pointer_checks
14937 || (DECL_CONTEXT (base)
14938 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
14939 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
14940 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
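/* E.g. for "extern int x __attribute__ ((weak));" the address &x may
   legitimately be null when no definition is linked in, so it cannot
   be assumed nonzero.  */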
14942 /* Constants are never weak. */
14943 if (CONSTANT_CLASS_P (base))
14950 sub_strict_overflow_p = false;
14951 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14952 &sub_strict_overflow_p)
14953 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14954 &sub_strict_overflow_p))
14956 if (sub_strict_overflow_p)
14957 *strict_overflow_p = true;
14968 /* Return true when T is an address and is known to be nonzero.
14969 For floating point we further ensure that T is not denormal.
14970 Similar logic is present in nonzero_address in rtlanal.h.
14972 If the return value is based on the assumption that signed overflow
14973 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14974 change *STRICT_OVERFLOW_P. */
14977 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14979 tree type = TREE_TYPE (t);
14980 enum tree_code code;
14982 /* Doing something useful for floating point would need more work. */
14983 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
14986 code = TREE_CODE (t);
14987 switch (TREE_CODE_CLASS (code))
14990 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14991 strict_overflow_p);
14993 case tcc_comparison:
14994 return tree_binary_nonzero_warnv_p (code, type,
14995 TREE_OPERAND (t, 0),
14996 TREE_OPERAND (t, 1),
14997 strict_overflow_p);
14999 case tcc_declaration:
15000 case tcc_reference:
15001 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15009 case TRUTH_NOT_EXPR:
15010 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15011 strict_overflow_p);
15013 case TRUTH_AND_EXPR:
15014 case TRUTH_OR_EXPR:
15015 case TRUTH_XOR_EXPR:
15016 return tree_binary_nonzero_warnv_p (code, type,
15017 TREE_OPERAND (t, 0),
15018 TREE_OPERAND (t, 1),
15019 strict_overflow_p);
15026 case WITH_SIZE_EXPR:
15028 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15030 case COMPOUND_EXPR:
15033 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15034 strict_overflow_p);
15037 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15038 strict_overflow_p);
15041 return alloca_call_p (t);
15049 /* Return true when T is an address and is known to be nonzero.
15050 Handle warnings about undefined signed overflow. */
15053 tree_expr_nonzero_p (tree t)
15055 bool ret, strict_overflow_p;
15057 strict_overflow_p = false;
15058 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15059 if (strict_overflow_p)
15060 fold_overflow_warning (("assuming signed overflow does not occur when "
15061 "determining that expression is always "
15063 WARN_STRICT_OVERFLOW_MISC);
15067 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15068 attempt to fold the expression to a constant without modifying TYPE, OP0 or OP1.
15071 If the expression could be simplified to a constant, then return
15072 the constant. If the expression would not be simplified to a
15073 constant, then return NULL_TREE. */
15076 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15078 tree tem = fold_binary (code, type, op0, op1);
15079 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15082 /* Given the components of a unary expression CODE, TYPE and OP0,
15083 attempt to fold the expression to a constant without modifying TYPE or OP0.
15086 If the expression could be simplified to a constant, then return
15087 the constant. If the expression would not be simplified to a
15088 constant, then return NULL_TREE. */
15091 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15093 tree tem = fold_unary (code, type, op0);
15094 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15097 /* If EXP represents referencing an element in a constant string
15098 (either via pointer arithmetic or array indexing), return the
15099 tree representing the value accessed, otherwise return NULL. */
15102 fold_read_from_constant_string (tree exp)
15104 if ((TREE_CODE (exp) == INDIRECT_REF
15105 || TREE_CODE (exp) == ARRAY_REF)
15106 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15108 tree exp1 = TREE_OPERAND (exp, 0);
15111 location_t loc = EXPR_LOCATION (exp);
15113 if (TREE_CODE (exp) == INDIRECT_REF)
15114 string = string_constant (exp1, &index);
15117 tree low_bound = array_ref_low_bound (exp);
15118 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15120 /* Optimize the special-case of a zero lower bound.
15122 We convert the low_bound to sizetype to avoid some problems
15123 with constant folding. (E.g. suppose the lower bound is 1,
15124 and its mode is QI.  Without the conversion, (ARRAY
15125 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15126 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15127 if (! integer_zerop (low_bound))
15128 index = size_diffop_loc (loc, index,
15129 fold_convert_loc (loc, sizetype, low_bound));
15135 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15136 && TREE_CODE (string) == STRING_CST
15137 && TREE_CODE (index) == INTEGER_CST
15138 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15139 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15141 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15142 return build_int_cst_type (TREE_TYPE (exp),
15143 (TREE_STRING_POINTER (string)
15144 [TREE_INT_CST_LOW (index)]));
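/* For example, "hello"[1] passes all of the checks above and folds to
   the character constant 'e'.  */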
15149 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15150 an integer constant, real, or fixed-point constant.
15152 TYPE is the type of the result. */
15155 fold_negate_const (tree arg0, tree type)
15157 tree t = NULL_TREE;
15159 switch (TREE_CODE (arg0))
15163 unsigned HOST_WIDE_INT low;
15164 HOST_WIDE_INT high;
15165 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15166 TREE_INT_CST_HIGH (arg0),
15168 t = force_fit_type_double (type, low, high, 1,
15169 (overflow | TREE_OVERFLOW (arg0))
15170 && !TYPE_UNSIGNED (type));
15175 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15180 FIXED_VALUE_TYPE f;
15181 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15182 &(TREE_FIXED_CST (arg0)), NULL,
15183 TYPE_SATURATING (type));
15184 t = build_fixed (type, f);
15185 /* Propagate overflow flags. */
15186 if (overflow_p | TREE_OVERFLOW (arg0))
15187 TREE_OVERFLOW (t) = 1;
15192 gcc_unreachable ();
15198 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15199 an integer constant or real constant.
15201 TYPE is the type of the result. */
15204 fold_abs_const (tree arg0, tree type)
15206 tree t = NULL_TREE;
15208 switch (TREE_CODE (arg0))
15211 /* If the value is unsigned, then the absolute value is
15212 the same as the ordinary value. */
15213 if (TYPE_UNSIGNED (type))
15215 /* Similarly, if the value is non-negative. */
15216 else if (INT_CST_LT (integer_minus_one_node, arg0))
15218 /* If the value is negative, then the absolute value is its negation.  */
15222 unsigned HOST_WIDE_INT low;
15223 HOST_WIDE_INT high;
15224 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15225 TREE_INT_CST_HIGH (arg0),
15227 t = force_fit_type_double (type, low, high, -1,
15228 overflow | TREE_OVERFLOW (arg0));
15233 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15234 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15240 gcc_unreachable ();
15246 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15247 constant. TYPE is the type of the result. */
15250 fold_not_const (tree arg0, tree type)
15252 tree t = NULL_TREE;
15254 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15256 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15257 ~TREE_INT_CST_HIGH (arg0), 0,
15258 TREE_OVERFLOW (arg0));
15263 /* Given CODE, a relational operator, the target type, TYPE and two
15264 constant operands OP0 and OP1, return the result of the
15265 relational operation. If the result is not a compile time
15266 constant, then return NULL_TREE. */
15269 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15271 int result, invert;
15273 /* From here on, the only cases we handle are when the result is
15274 known to be a constant. */
15276 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15278 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15279 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15281 /* Handle the cases where either operand is a NaN. */
15282 if (real_isnan (c0) || real_isnan (c1))
15292 case UNORDERED_EXPR:
15306 if (flag_trapping_math)
15312 gcc_unreachable ();
15315 return constant_boolean_node (result, type);
15318 return constant_boolean_node (real_compare (code, c0, c1), type);
15321 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15323 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15324 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15325 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15328 /* Handle equality/inequality of complex constants. */
15329 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15331 tree rcond = fold_relational_const (code, type,
15332 TREE_REALPART (op0),
15333 TREE_REALPART (op1));
15334 tree icond = fold_relational_const (code, type,
15335 TREE_IMAGPART (op0),
15336 TREE_IMAGPART (op1));
15337 if (code == EQ_EXPR)
15338 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15339 else if (code == NE_EXPR)
15340 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15345 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15347 To compute GT, swap the arguments and do LT.
15348 To compute GE, do LT and invert the result.
15349 To compute LE, swap the arguments, do LT and invert the result.
15350 To compute NE, do EQ and invert the result.
15352 Therefore, the code below must handle only EQ and LT. */
15354 if (code == LE_EXPR || code == GT_EXPR)
15359 code = swap_tree_comparison (code);
15362 /* Note that it is safe to invert for real values here because we
15363 have already handled the one case where it matters.  */
15366 if (code == NE_EXPR || code == GE_EXPR)
15369 code = invert_tree_comparison (code, false);
15372 /* Compute a result for LT or EQ if args permit;
15373 otherwise return T.  */
15374 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15376 if (code == EQ_EXPR)
15377 result = tree_int_cst_equal (op0, op1);
15378 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15379 result = INT_CST_LT_UNSIGNED (op0, op1);
15381 result = INT_CST_LT (op0, op1);
15388 return constant_boolean_node (result, type);
15391 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15392 indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR itself.  */
15396 fold_build_cleanup_point_expr (tree type, tree expr)
15398 /* If the expression does not have side effects then we don't have to wrap
15399 it with a cleanup point expression. */
15400 if (!TREE_SIDE_EFFECTS (expr))
15403 /* If the expression is a return, check whether the expression inside the
15404 return, or the right-hand side of the MODIFY_EXPR inside the return, is
15405 free of side effects.  If either is, we don't need to wrap the expression
15406 in a cleanup point expression.  Note we don't check the left-hand side of
15407 the MODIFY_EXPR because it should always be a return decl.  */
15408 if (TREE_CODE (expr) == RETURN_EXPR)
15410 tree op = TREE_OPERAND (expr, 0);
15411 if (!op || !TREE_SIDE_EFFECTS (op))
15413 op = TREE_OPERAND (op, 1);
15414 if (!TREE_SIDE_EFFECTS (op))
15418 return build1 (CLEANUP_POINT_EXPR, type, expr);
15421 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15422 of an indirection through OP0, or NULL_TREE if no simplification is possible.  */
15426 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15432 subtype = TREE_TYPE (sub);
15433 if (!POINTER_TYPE_P (subtype))
15436 if (TREE_CODE (sub) == ADDR_EXPR)
15438 tree op = TREE_OPERAND (sub, 0);
15439 tree optype = TREE_TYPE (op);
15440 /* *&CONST_DECL -> to the value of the const decl. */
15441 if (TREE_CODE (op) == CONST_DECL)
15442 return DECL_INITIAL (op);
15443 /* *&p => p; make sure to handle *&"str"[cst] here. */
15444 if (type == optype)
15446 tree fop = fold_read_from_constant_string (op);
15452 /* *(foo *)&fooarray => fooarray[0] */
15453 else if (TREE_CODE (optype) == ARRAY_TYPE
15454 && type == TREE_TYPE (optype))
15456 tree type_domain = TYPE_DOMAIN (optype);
15457 tree min_val = size_zero_node;
15458 if (type_domain && TYPE_MIN_VALUE (type_domain))
15459 min_val = TYPE_MIN_VALUE (type_domain);
15460 op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15461 SET_EXPR_LOCATION (op0, loc);
15464 /* *(foo *)&complexfoo => __real__ complexfoo */
15465 else if (TREE_CODE (optype) == COMPLEX_TYPE
15466 && type == TREE_TYPE (optype))
15467 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15468 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15469 else if (TREE_CODE (optype) == VECTOR_TYPE
15470 && type == TREE_TYPE (optype))
15472 tree part_width = TYPE_SIZE (type);
15473 tree index = bitsize_int (0);
15474 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15478 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15479 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15480 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15482 tree op00 = TREE_OPERAND (sub, 0);
15483 tree op01 = TREE_OPERAND (sub, 1);
15487 op00type = TREE_TYPE (op00);
15488 if (TREE_CODE (op00) == ADDR_EXPR
15489 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15490 && type == TREE_TYPE (TREE_TYPE (op00type)))
15492 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15493 tree part_width = TYPE_SIZE (type);
15494 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15495 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15496 tree index = bitsize_int (indexi);
15498 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15499 return fold_build3_loc (loc,
15500 BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15501 part_width, index);
15507 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15508 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15509 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15511 tree op00 = TREE_OPERAND (sub, 0);
15512 tree op01 = TREE_OPERAND (sub, 1);
15516 op00type = TREE_TYPE (op00);
15517 if (TREE_CODE (op00) == ADDR_EXPR
15518 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15519 && type == TREE_TYPE (TREE_TYPE (op00type)))
15521 tree size = TYPE_SIZE_UNIT (type);
15522 if (tree_int_cst_equal (size, op01))
15523 return fold_build1_loc (loc, IMAGPART_EXPR, type,
15524 TREE_OPERAND (op00, 0));
15528 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15529 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15530 && type == TREE_TYPE (TREE_TYPE (subtype)))
15533 tree min_val = size_zero_node;
15534 sub = build_fold_indirect_ref_loc (loc, sub);
15535 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15536 if (type_domain && TYPE_MIN_VALUE (type_domain))
15537 min_val = TYPE_MIN_VALUE (type_domain);
15538 op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15539 SET_EXPR_LOCATION (op0, loc);
15546 /* Builds an expression for an indirection through T, simplifying some cases.  */
15550 build_fold_indirect_ref_loc (location_t loc, tree t)
15552 tree type = TREE_TYPE (TREE_TYPE (t));
15553 tree sub = fold_indirect_ref_1 (loc, type, t);
15558 t = build1 (INDIRECT_REF, type, t);
15559 SET_EXPR_LOCATION (t, loc);
15563 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15566 fold_indirect_ref_loc (location_t loc, tree t)
15568 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15576 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15577 whose result is ignored. The type of the returned tree need not be
15578 the same as that of the original expression.  */
15581 fold_ignored_result (tree t)
15583 if (!TREE_SIDE_EFFECTS (t))
15584 return integer_zero_node;
15587 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15590 t = TREE_OPERAND (t, 0);
15594 case tcc_comparison:
15595 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15596 t = TREE_OPERAND (t, 0);
15597 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15598 t = TREE_OPERAND (t, 1);
15603 case tcc_expression:
15604 switch (TREE_CODE (t))
15606 case COMPOUND_EXPR:
15607 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15609 t = TREE_OPERAND (t, 0);
15613 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15614 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15616 t = TREE_OPERAND (t, 0);
15629 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15630 This can only be applied to objects of a sizetype. */
15633 round_up_loc (location_t loc, tree value, int divisor)
15635 tree div = NULL_TREE;
15637 gcc_assert (divisor > 0);
15641 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15642 have to do anything. Only do this when we are not given a const,
15643 because in that case, this check is more expensive than just doing it.  */
15645 if (TREE_CODE (value) != INTEGER_CST)
15647 div = build_int_cst (TREE_TYPE (value), divisor);
15649 if (multiple_of_p (TREE_TYPE (value), value, div))
15653 /* If divisor is a power of two, simplify this to bit manipulation. */
15654 if (divisor == (divisor & -divisor))
15656 if (TREE_CODE (value) == INTEGER_CST)
15658 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15659 unsigned HOST_WIDE_INT high;
15662 if ((low & (divisor - 1)) == 0)
15665 overflow_p = TREE_OVERFLOW (value);
15666 high = TREE_INT_CST_HIGH (value);
15667 low &= ~(divisor - 1);
15676 return force_fit_type_double (TREE_TYPE (value), low, high,
15683 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15684 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15685 t = build_int_cst (TREE_TYPE (value), -divisor);
15686 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15692 div = build_int_cst (TREE_TYPE (value), divisor);
15693 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15694 value = size_binop_loc (loc, MULT_EXPR, value, div);
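/* E.g. round_up (37, 8) takes the power-of-two path above and yields
   (37 + 7) & -8 == 40, while round_up (37, 12) takes the division
   path: ceil (37 / 12) * 12 == 48.  */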
15700 /* Likewise, but round down. */
15703 round_down_loc (location_t loc, tree value, int divisor)
15705 tree div = NULL_TREE;
15707 gcc_assert (divisor > 0);
15711 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15712 have to do anything. Only do this when we are not given a const,
15713 because in that case, this check is more expensive than just doing it.  */
15715 if (TREE_CODE (value) != INTEGER_CST)
15717 div = build_int_cst (TREE_TYPE (value), divisor);
15719 if (multiple_of_p (TREE_TYPE (value), value, div))
15723 /* If divisor is a power of two, simplify this to bit manipulation. */
15724 if (divisor == (divisor & -divisor))
15728 t = build_int_cst (TREE_TYPE (value), -divisor);
15729 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15734 div = build_int_cst (TREE_TYPE (value), divisor);
15735 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15736 value = size_binop_loc (loc, MULT_EXPR, value, div);
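/* Likewise round_down (37, 8) == 37 & -8 == 32, and
   round_down (37, 12) == (37 / 12) * 12 == 36.  */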
15742 /* Returns the pointer to the base of the object addressed by EXP and
15743 extracts the information about the offset of the access, storing it
15744 in PBITPOS and POFFSET.  */
15747 split_address_to_core_and_offset (tree exp,
15748 HOST_WIDE_INT *pbitpos, tree *poffset)
15751 enum machine_mode mode;
15752 int unsignedp, volatilep;
15753 HOST_WIDE_INT bitsize;
15754 location_t loc = EXPR_LOCATION (exp);
15756 if (TREE_CODE (exp) == ADDR_EXPR)
15758 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15759 poffset, &mode, &unsignedp, &volatilep,
15761 core = build_fold_addr_expr_loc (loc, core);
15767 *poffset = NULL_TREE;
15773 /* Returns true if addresses of E1 and E2 differ by a constant, false
15774 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15777 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15780 HOST_WIDE_INT bitpos1, bitpos2;
15781 tree toffset1, toffset2, tdiff, type;
15783 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15784 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15786 if (bitpos1 % BITS_PER_UNIT != 0
15787 || bitpos2 % BITS_PER_UNIT != 0
15788 || !operand_equal_p (core1, core2, 0))
15791 if (toffset1 && toffset2)
15793 type = TREE_TYPE (toffset1);
15794 if (type != TREE_TYPE (toffset2))
15795 toffset2 = fold_convert (type, toffset2);
15797 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15798 if (!cst_and_fits_in_hwi (tdiff))
15801 *diff = int_cst_value (tdiff);
15803 else if (toffset1 || toffset2)
15805 /* If only one of the offsets is non-constant, the difference cannot be a constant.  */
15812 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
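/* For example, with "int a[10];" and 4-byte ints, &a[3] and &a[1]
   share the core &a and their bit positions differ by 64, so *DIFF is
   set to 8 and we return true.  */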
15816 /* Simplify the floating point expression EXP when the sign of the
15817 result is not significant.  Return NULL_TREE if no simplification is possible.  */
15821 fold_strip_sign_ops (tree exp)
15824 location_t loc = EXPR_LOCATION (exp);
15826 switch (TREE_CODE (exp))
15830 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15831 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15835 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15837 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15838 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15839 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15840 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
15841 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15842 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15845 case COMPOUND_EXPR:
15846 arg0 = TREE_OPERAND (exp, 0);
15847 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15849 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15853 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15854 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15856 return fold_build3_loc (loc,
15857 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15858 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15859 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15864 const enum built_in_function fcode = builtin_mathfn_code (exp);
15867 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15868 /* Strip copysign function call, return the 1st argument. */
15869 arg0 = CALL_EXPR_ARG (exp, 0);
15870 arg1 = CALL_EXPR_ARG (exp, 1);
15871 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
15874 /* Strip sign ops from the argument of "odd" math functions. */
15875 if (negate_mathfn_p (fcode))
15877 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15879 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);